diff --git a/CrawlerDB.cs b/CrawlerDB.cs
index 022ef9d..fc24ebc 100644
--- a/CrawlerDB.cs
+++ b/CrawlerDB.cs
@@ -8,6 +8,15 @@ public class CrawlerDB
{
public const string DB_CONN_STRING = "data source=SUBODH;initial catalog=FileCrawler;Integrated security=true";
+ #region "Inserts new url to database|SaveFileURLToDB(String tableName, String hostName, string fileType, string fileDescription,string fileUrl)"
+ /// <summary>
+ /// Inserts a new URL into the specified table in the database.
+ /// </summary>
+ /// <param name="tableName">The table into which the URL will be inserted</param>
+ /// <param name="hostName">The URL host</param>
+ /// <param name="fileType">The type of file being inserted</param>
+ /// <param name="fileDescription">The file description</param>
+ /// <param name="fileUrl">The URL of the file</param>
public void SaveFileURLToDB(String tableName, String hostName, string fileType, string fileDescription,string fileUrl)
{
using (SqlConnection con = new SqlConnection(DB_CONN_STRING))
@@ -22,7 +31,7 @@ public void SaveFileURLToDB(String tableName, String hostName, string fileType,
command.Parameters.Add(new SqlParameter("fileDescription", fileDescription));
command.Parameters.Add(new SqlParameter("url", fileUrl));
command.Parameters.Add(new SqlParameter("createdDt", DateTime.Now.ToString()));
- command.Parameters.Add(new SqlParameter("createdByName", "Subodh"));
+ command.Parameters.Add(new SqlParameter("createdByName",Environment.MachineName + "/" + Environment.UserName));
command.Parameters.Add(new SqlParameter("updatedDt", DBNull.Value));
command.Parameters.Add(new SqlParameter("updatedByName", DBNull.Value));
command.ExecuteNonQuery();
@@ -39,6 +48,7 @@ public void SaveFileURLToDB(String tableName, String hostName, string fileType,
}
}
}
+ #endregion
}
}
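The hunk above shows the SqlParameter bindings but elides how `command` itself is built. Below is a minimal sketch of the kind of parameterized INSERT those bindings would attach to; the SQL text and the column names (HostName, FileType, FileDescription, Url, CreatedDt, CreatedByName, UpdatedDt, UpdatedByName) are assumptions for illustration, not taken from the diff.

    // Sketch only: column names and SQL text are assumed; the real command text
    // lies outside the hunk shown above.
    using System;
    using System.Data.SqlClient;

    public class CrawlerDbSketch
    {
        public const string DB_CONN_STRING =
            "data source=SUBODH;initial catalog=FileCrawler;Integrated security=true";

        public void SaveFileURLToDB(string tableName, string hostName, string fileType,
                                    string fileDescription, string fileUrl)
        {
            using (SqlConnection con = new SqlConnection(DB_CONN_STRING))
            {
                con.Open();
                // A table name cannot be passed as a SqlParameter, so it is concatenated;
                // acceptable here only because tableName comes from trusted code, not user input.
                string sql = "INSERT INTO [" + tableName + "] " +
                             "(HostName, FileType, FileDescription, Url, CreatedDt, CreatedByName, UpdatedDt, UpdatedByName) " +
                             "VALUES (@hostName, @fileType, @fileDescription, @url, @createdDt, @createdByName, @updatedDt, @updatedByName)";
                using (SqlCommand command = new SqlCommand(sql, con))
                {
                    command.Parameters.Add(new SqlParameter("hostName", hostName));
                    command.Parameters.Add(new SqlParameter("fileType", fileType));
                    command.Parameters.Add(new SqlParameter("fileDescription", fileDescription));
                    command.Parameters.Add(new SqlParameter("url", fileUrl));
                    command.Parameters.Add(new SqlParameter("createdDt", DateTime.Now.ToString()));
                    command.Parameters.Add(new SqlParameter("createdByName", Environment.MachineName + "/" + Environment.UserName));
                    command.Parameters.Add(new SqlParameter("updatedDt", DBNull.Value));
                    command.Parameters.Add(new SqlParameter("updatedByName", DBNull.Value));
                    command.ExecuteNonQuery();
                }
            }
        }
    }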
diff --git a/Program.cs b/Program.cs
index 84fb126..c3d6acb 100644
--- a/Program.cs
+++ b/Program.cs
@@ -14,9 +14,10 @@ namespace FileCrawler
{
class Program
{
+ static String webURL = @"http://ebooks.allfree-stuff.com/";
+
#region "Fields and Object Declaration"
- static String webURL = @"http://ebooks.allfree-stuff.com/";
static String fileTypePath = @"C:\Users\Subodhlc\Documents\Visual Studio 2012\Projects\FileCrawler\FileCrawler\FileTypes";
static CrawlerDB crawalerDatabase = new CrawlerDB();
static FileTypes fileTyp = new FileTypes();
@@ -24,19 +25,19 @@ class Program
#endregion
-
static void Main(string[] args)
{
//Will Get the FileTypes to Download
filters = fileTyp.GetFileTypesToDownlaod(fileTypePath);
-
//Will use app.config for configuration
PoliteWebCrawler crawler = new PoliteWebCrawler();
+ #region "Crawler Events"
crawler.PageCrawlStartingAsync += crawler_ProcessPageCrawlStarting;
crawler.PageCrawlCompletedAsync += crawler_ProcessPageCrawlCompleted;
crawler.PageCrawlDisallowedAsync += crawler_PageCrawlDisallowed;
crawler.PageLinksCrawlDisallowedAsync += crawler_PageLinksCrawlDisallowed;
+ #endregion
CrawlResult result = crawler.Crawl(new Uri(webURL));
if (result.ErrorOccurred)
@@ -45,6 +46,7 @@ static void Main(string[] args)
Console.WriteLine("Crawl of {0} completed without error.", result.RootUri.AbsoluteUri);
}
+ #region "Crawler Event Delegates"
static void crawler_ProcessPageCrawlStarting(object sender, PageCrawlStartingArgs e)
{
PageToCrawl pageToCrawl = e.PageToCrawl;
@@ -74,6 +76,7 @@ static void crawler_PageCrawlDisallowed(object sender, PageCrawlDisallowedArgs e
Console.WriteLine("Did not crawl page {0} due to {1}", pageToCrawl.Uri.AbsoluteUri, e.DisallowedReason);
SavePageCrawlDisallowed(pageToCrawl.Uri.AbsoluteUri);
}
+ #endregion
//Saving the file links
private static void SaveURLSuccess(string p)
@@ -102,7 +105,7 @@ private static void SavePageCrawlDisallowed(string p)
WriteToDB(p);
}
- //DB Writes
+ #region "Inserts new url to database"
private static void WriteToDB(string p)
{
try
@@ -131,6 +134,7 @@ private static void WriteToDB(string p)
System.Console.WriteLine("**************************************");
}
}
+ #endregion
}
}
\ No newline at end of file
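
The Program.cs hunks wire up Abot's crawler events but elide the handler bodies. Below is a minimal sketch of what the completed-page handler could look like; only the delegate name, the `filters` field, and the `SaveURLSuccess` call appear in the diff, so the handler body, the extension check, and the shape of `filters` (a list of extensions such as ".pdf") are assumptions.

    // Sketch only: handler body and filter logic are assumed, not taken from the diff.
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using Abot.Crawler;
    using Abot.Poco;

    class ProgramSketch
    {
        // Assumed: extensions loaded by FileTypes.GetFileTypesToDownlaod, e.g. ".pdf", ".epub"
        static List<string> filters = new List<string>();

        static void crawler_ProcessPageCrawlCompleted(object sender, PageCrawlCompletedArgs e)
        {
            CrawledPage crawledPage = e.CrawledPage;
            string url = crawledPage.Uri.AbsoluteUri;

            if (crawledPage.WebException != null)
            {
                Console.WriteLine("Crawl of page failed {0}", url);
                return;
            }

            // Save only links that end with one of the configured file extensions.
            if (filters.Any(ext => url.EndsWith(ext, StringComparison.OrdinalIgnoreCase)))
                SaveURLSuccess(url);
        }

        static void SaveURLSuccess(string p)
        {
            Console.WriteLine("Saving file link {0}", p);
            // WriteToDB(p); // would forward to CrawlerDB.SaveFileURLToDB as in the diff
        }
    }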