
Commit a0496ef

author 邹嵩 committed
Exception messages should not end with a period (异常信息不应该用.结尾)
1 parent d4ccb7c · commit a0496ef

35 files changed · +75 −75 lines changed
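The change is mechanical: every SpiderException message (and the QueueTimeoutException base call) drops its trailing period, and the two nuspec files bump the package version from 2.4.2-beta7 to 2.4.3. As a minimal sketch of how the message convention could be checked, the helper below is an illustrative assumption, not part of this commit or of DotnetSpider:

// Minimal sketch of the convention this commit enforces: SpiderException
// messages are plain phrases without a trailing period. This helper is an
// illustrative assumption, not a DotnetSpider API.
using System;

internal static class ExceptionMessageConvention
{
    // Returns true when the message follows the "no trailing period" rule.
    internal static bool FollowsConvention(string message)
    {
        return !string.IsNullOrWhiteSpace(message)
               && !message.EndsWith(".", StringComparison.Ordinal);
    }
}

// Example: FollowsConvention("Get proxy timeout")  == true
//          FollowsConvention("Get proxy timeout.") == false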

nuget/DotnetSpider.Core.nuspec

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 <package xmlns="http://schemas.microsoft.com/packaging/2012/06/nuspec.xsd">
   <metadata>
     <id>DotnetSpider2.Core</id>
-    <version>2.4.2-beta7</version>
+    <version>2.4.3</version>
     <authors>[email protected];Walterwhatwater;xiaohuan0204</authors>
     <owners>[email protected]</owners>
     <iconUrl>https://github.com/zlzforever/DotnetSpider/blob/master/images/icon.png?raw=true</iconUrl>

nuget/DotnetSpider.Extension.nuspec

Lines changed: 3 additions & 3 deletions
@@ -2,7 +2,7 @@
 <package xmlns="http://schemas.microsoft.com/packaging/2012/06/nuspec.xsd">
   <metadata>
     <id>DotnetSpider2.Extension</id>
-    <version>2.4.2-beta7</version>
+    <version>2.4.3</version>
     <authors>[email protected];Walterwhatwater;xiaohuan0204</authors>
     <owners>[email protected]</owners>
     <iconUrl>https://github.com/zlzforever/DotnetSpider/blob/master/images/icon.png?raw=true</iconUrl>
@@ -13,7 +13,7 @@
     <description>A .NET Standard web crawling library similar to WebMagic and Scrapy. It is a lightweight ,efficient and fast high-level web crawling &amp; scraping framework for .NET</description>
     <dependencies>
       <group targetFramework=".NETStandard2.0">
-        <dependency id="DotnetSpider2.Core" version="2.4.2-beta7" />
+        <dependency id="DotnetSpider2.Core" version="2.4.3" />
         <dependency id="Dapper" version="1.50.2"/>
         <dependency id="MailKit" version="2.0.1"/>
         <dependency id="MongoDB.Driver" version="2.5.0"/>
@@ -28,7 +28,7 @@
         <dependency id="MessagePack" version="1.7.3.4"/>
       </group>
       <group targetFramework=".NETFramework4.5" >
-        <dependency id="DotnetSpider2.Core" version="2.4.2-beta7" />
+        <dependency id="DotnetSpider2.Core" version="2.4.3" />
         <dependency id="Dapper" version="1.50.2"/>
         <dependency id="MailKit" version="2.0.1"/>
         <dependency id="MongoDB.Driver" version="2.5.0"/>

src/DotnetSpider.Core/Downloader/AfterDownloadCompleteHandler.Extensions.cs

Lines changed: 6 additions & 6 deletions
@@ -370,7 +370,7 @@ public RetryWhenContainsHandler(params string[] contents)
 {
     if (contents == null || contents.Length == 0)
     {
-        throw new SpiderException("contents should not be empty/null.");
+        throw new SpiderException("contents should not be empty/null");
     }
 
     _contents = contents;
@@ -468,7 +468,7 @@ public RedialWhenExceptionThrowHandler(string exceptionMessage)
 {
     if (string.IsNullOrWhiteSpace(exceptionMessage))
     {
-        throw new SpiderException("exceptionMessage should not be null or empty.");
+        throw new SpiderException("exceptionMessage should not be null or empty");
     }
 
     _exceptionMessage = exceptionMessage;
@@ -521,10 +521,10 @@ public class RedialAndUpdateCookiesWhenContainsHandler : AfterDownloadCompleteHa
 /// <param name="contents">包含的内容(specified contents)</param>
 public RedialAndUpdateCookiesWhenContainsHandler(ICookieInjector cookieInjector, params string[] contents)
 {
-    _cookieInjector = cookieInjector ?? throw new SpiderException("cookieInjector should not be null.");
+    _cookieInjector = cookieInjector ?? throw new SpiderException("cookieInjector should not be null");
     if (contents == null || contents.Length == 0)
     {
-        throw new SpiderException("contents should not be null or empty.");
+        throw new SpiderException("contents should not be null or empty");
     }
 
     _contents = contents;
@@ -616,7 +616,7 @@ public override void Handle(ref Page page, IDownloader downloader, ISpider spide
 
     if (begin < 0)
     {
-        throw new SpiderException($"Cutout failed, can not find begin string: {_startPart}.");
+        throw new SpiderException($"Cutout failed, can not find begin string: {_startPart}");
     }
 
     int end = rawText.IndexOf(_endPart, begin, StringComparison.Ordinal);
@@ -629,7 +629,7 @@ public override void Handle(ref Page page, IDownloader downloader, ISpider spide
 
     if (begin < 0 || length < 0)
    {
-        throw new SpiderException("Cutout failed. Please check your settings.");
+        throw new SpiderException("Cutout failed. Please check your settings");
     }
 
     string newRawText = rawText.Substring(begin, length).Trim();
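For orientation, the handler constructors changed above validate their inputs and are then attached to a downloader. A hypothetical wiring sketch follows; the HttpClientDownloader type and the AddAfterDownloadCompleteHandler registration call are assumptions about typical DotnetSpider usage and are not shown in this diff:

// Hypothetical usage sketch, for illustration only. The downloader type and
// the registration method name are assumptions; this commit only changes the
// exception messages thrown by the constructors shown above.
var downloader = new HttpClientDownloader();
downloader.AddAfterDownloadCompleteHandler(
    new RetryWhenContainsHandler("Access Denied", "Rate limit exceeded"));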

src/DotnetSpider.Core/Downloader/HttpClientPool.cs

Lines changed: 2 additions & 2 deletions
@@ -43,11 +43,11 @@ public HttpClientElement GetHttpClient(ISpider spider, IDownloader downloader, C
 {
     if (cookieContainer == null)
     {
-        throw new SpiderException($"{nameof(cookieContainer)} should not be null.");
+        throw new SpiderException($"{nameof(cookieContainer)} should not be null");
     }
     if (downloader == null)
     {
-        throw new SpiderException($"{nameof(downloader)} should not be null.");
+        throw new SpiderException($"{nameof(downloader)} should not be null");
     }
     var newCookieContainer = GenerateNewCookieContainer(spider, downloader, cookieContainer, cookieInjector);

src/DotnetSpider.Core/Infrastructure/BlockingQueue.cs

Lines changed: 1 addition & 1 deletion
@@ -402,7 +402,7 @@ public class QueueTimeoutException : SpiderException
     /// <summary>
     /// 构造方法
     /// </summary>
-    public QueueTimeoutException() : base("Queue method timed out on wait.")
+    public QueueTimeoutException() : base("Queue method timed out on wait")
     {
     }
 }

src/DotnetSpider.Core/Infrastructure/Database/DatabaseExtensions.cs

Lines changed: 4 additions & 4 deletions
@@ -29,11 +29,11 @@ public static DbConnection CreateDbConnection(this ConnectionStringSettings conn
 {
     if (connectionStringSettings == null)
     {
-        throw new SpiderException("ConnectionStringSetting is null.");
+        throw new SpiderException("ConnectionStringSetting is null");
     }
     if (string.IsNullOrWhiteSpace(connectionStringSettings.ConnectionString) || string.IsNullOrWhiteSpace(connectionStringSettings.ProviderName))
     {
-        throw new SpiderException("ConnectionStringSetting is incorrect.");
+        throw new SpiderException("ConnectionStringSetting is incorrect");
     }
 
     var factory = DbProviderFactories.GetFactory(connectionStringSettings.ProviderName);
@@ -64,7 +64,7 @@ public static DbConnection CreateDbConnection(this ConnectionStringSettings conn
         }
     }
 
-    throw new SpiderException($"Create or open DbConnection failed: {connectionStringSettings.ConnectionString}.");
+    throw new SpiderException($"Create or open DbConnection failed: {connectionStringSettings.ConnectionString}");
 }
 
 /// <summary>
@@ -128,7 +128,7 @@ public static DbConnection CreateDbConnection(Database source, string connectStr
         }
     }
 
-    throw new SpiderException("Create connection failed.");
+    throw new SpiderException("Create connection failed");
 }
 
 /// <summary>

src/DotnetSpider.Core/Infrastructure/Database/DbProviderFactories.cs

Lines changed: 2 additions & 2 deletions
@@ -114,11 +114,11 @@ public static DbProviderFactory GetFactory(string providerInvariantName)
         Configs.TryGetValue(providerInvariantName, out var factory);
         if (factory == null)
         {
-            throw new SpiderException("Provider not found.");
+            throw new SpiderException("Provider not found");
         }
         return factory;
     }
-    throw new SpiderException("Provider not found.");
+    throw new SpiderException("Provider not found");
 }
 
 /// <summary>

src/DotnetSpider.Core/Pipeline/BaseFilePipeline.cs

Lines changed: 1 addition & 1 deletion
@@ -37,7 +37,7 @@ protected void InitFolder(string interval)
 {
     if (string.IsNullOrWhiteSpace(interval))
     {
-        throw new SpiderException("Interval path should not be null.");
+        throw new SpiderException("Interval path should not be null");
     }
     if (!interval.EndsWith(Env.PathSeperator))
     {

src/DotnetSpider.Core/Processor/TargetUrlsExtractor.Extensions.cs

Lines changed: 1 addition & 1 deletion
@@ -267,7 +267,7 @@ protected PaginationTargetUrlsExtractor(string paginationStr, ITargetUrlsExtract
 {
     if (string.IsNullOrWhiteSpace(paginationStr))
     {
-        throw new SpiderException("paginationStr should not be null or empty.");
+        throw new SpiderException("paginationStr should not be null or empty");
     }
 
     PaginationStr = paginationStr;

src/DotnetSpider.Core/Proxy/HttpProxyPool.cs

Lines changed: 2 additions & 2 deletions
@@ -31,7 +31,7 @@ public class HttpProxyPool : IHttpProxyPool
     /// <param name="reuseInterval">代理不被再次使用的间隔</param>
     public HttpProxyPool(IProxySupplier supplier, int reuseInterval = 500)
     {
-        _supplier = supplier ?? throw new SpiderException("IProxySupplier should not be null.");
+        _supplier = supplier ?? throw new SpiderException("IProxySupplier should not be null");
 
         _reuseInterval = reuseInterval;
 
@@ -69,7 +69,7 @@ public UseSpecifiedUriWebProxy GetProxy()
         Thread.Sleep(1000);
     }
 
-    throw new SpiderException("Get proxy timeout.");
+    throw new SpiderException("Get proxy timeout");
 }
 
 /// <summary>
