1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Primitives;
using Xunit;
namespace Microsoft.AspNetCore.Routing;
public class PathTokenizerTest
{
    // Each entry maps a raw request path to the segments the tokenizer is
    // expected to yield. A segment is expressed as (source, offset, length);
    // consecutive '/' characters produce zero-length segments, and a trailing
    // '/' never contributes a segment of its own.
    public static TheoryData<string, StringSegment[]> TokenizationData => new()
    {
        { string.Empty, new StringSegment[0] },
        { "/", new StringSegment[0] },
        { "//", new[] { new StringSegment("//", 1, 0) } },
        {
            "///",
            new[]
            {
                new StringSegment("///", 1, 0),
                new StringSegment("///", 2, 0),
            }
        },
        {
            "////",
            new[]
            {
                new StringSegment("////", 1, 0),
                new StringSegment("////", 2, 0),
                new StringSegment("////", 3, 0),
            }
        },
        { "/zero", new[] { new StringSegment("/zero", 1, 4) } },
        { "/zero/", new[] { new StringSegment("/zero/", 1, 4) } },
        {
            "/zero/one",
            new[]
            {
                new StringSegment("/zero/one", 1, 4),
                new StringSegment("/zero/one", 6, 3),
            }
        },
        {
            "/zero/one/",
            new[]
            {
                new StringSegment("/zero/one/", 1, 4),
                new StringSegment("/zero/one/", 6, 3),
            }
        },
        {
            "/zero/one/two",
            new[]
            {
                new StringSegment("/zero/one/two", 1, 4),
                new StringSegment("/zero/one/two", 6, 3),
                new StringSegment("/zero/one/two", 10, 3),
            }
        },
        {
            "/zero/one/two/",
            new[]
            {
                new StringSegment("/zero/one/two/", 1, 4),
                new StringSegment("/zero/one/two/", 6, 3),
                new StringSegment("/zero/one/two/", 10, 3),
            }
        },
    };

    // Count should report exactly as many segments as the expected set.
    [Theory]
    [MemberData(nameof(TokenizationData))]
    public void PathTokenizer_Count(string path, StringSegment[] expectedSegments)
    {
        // Arrange
        var subject = new PathTokenizer(new PathString(path));

        // Act
        var actualCount = subject.Count;

        // Assert
        Assert.Equal(expectedSegments.Length, actualCount);
    }

    // Every expected segment should be reachable by positional index.
    [Theory]
    [MemberData(nameof(TokenizationData))]
    public void PathTokenizer_Indexer(string path, StringSegment[] expectedSegments)
    {
        // Arrange
        var subject = new PathTokenizer(new PathString(path));

        // Act & Assert
        for (var index = 0; index < expectedSegments.Length; index++)
        {
            Assert.Equal(expectedSegments[index], subject[index]);
        }
    }

    // Enumerating the tokenizer should produce the expected segments in order.
    [Theory]
    [MemberData(nameof(TokenizationData))]
    public void PathTokenizer_Enumerator(string path, StringSegment[] expectedSegments)
    {
        // Arrange
        var subject = new PathTokenizer(new PathString(path));

        // Act & Assert
        Assert.Equal<StringSegment>(expectedSegments, subject);
    }
}
|