This is a wrapper I made for my autobuyer. It mimics Firefox. For the moment it's not 100% optimized, but it is still fast and probably safer than any other wrapper. Why? Because it behaves more like a small browser than a simple wrapper. I won't go into every detail, but I made it look as much like Firefox as I could. It can still be improved, so if you have any suggestions, go ahead.

Features:
- Gzip/deflate support
- Downloads the haggle image
- Manages the referers
- Manages the cookies
- Can download many kinds of resources (JPG/SWF/PNG/CSS/JS etc.) to fake Firefox's behavior
- Simple method interface

Code (Text):
using System;
using System.Collections.Generic;
using System.Text;
using System.Net.Sockets;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Net;
using System.ComponentModel;
using System.Data;
using System.Threading;
using System.Collections;
using System.IO.Compression;

/*
 * Author  : Zav75
 *
 * Purpose : This is a wrapper for HTTP requests,
 *           made specifically for the site neopets.com.
 *           It should be useful for building an autobuyer.
 *
 * Contact me : zavier86@hotmail.com
 */
namespace AutoBuyer
{
    /// <summary>
    /// The goal of this class is to emulate someone browsing Neopets with Firefox.
    /// To do that properly we need to download everything Firefox would download:
    /// all the images and JavaScript files. This class can probably be used on any
    /// other site without being seen.
    /// </summary>
    class WrapperHTTP
    {
        static private string _referer = "http://www.neopets.com/hi.phtml";
        static private CookieContainer _cookieJar = null;
        static private bool _isLoggedIn = false;

        private struct AcceptHeader
        {
            public const String JPG = "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,*/*;q=0.8";
            public const String PNG = "image/png,image/*;q=0.8,*/*;q=0.5";
            public const String HTML = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";
            public const String CSS = "text/css,*/*;q=0.1";
            public const String JS = "*/*";
            public const String PHP = "*/*";
            public const String CAPCHA_SHOW = "image/png,image/*;q=0.8,*/*;q=0.5,en;q=0.3";
            public const String GIF = "image/png,image/*;q=0.8,*/*;q=0.5";
            public const String APP_X_SWF = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";
        };

        /// <summary>
        /// Constructor
        /// </summary>
        public WrapperHTTP()
        {
        }

        /// <summary>
        /// This method fakes a restocker's fast haggling.
        /// </summary>
        /// <param name="offer"></param>
        /// <returns></returns>
        static private string smartHaggle(string offer)
        {
            // e.g. offer = 15789
            string newOffer = "";
            string digit = "";
            newOffer = offer.Substring(0, 1); // e.g. 1
            digit = offer.Substring(1, 1);    // e.g. 5
            for (int i = 1; i < offer.Length; i++)
            {
                newOffer += digit;
                // if the fast haggle is lower than what the merchant asks
                if ((int.Parse(newOffer) < int.Parse(offer)) && (i == offer.Length - 1)) // e.g. 15555 < 15789
                {
                    int tmp = int.Parse(digit);
                    tmp++;
                    digit = tmp.ToString(); // we increment the digit
                    i = 0;
                    newOffer = offer.Substring(0, 1);
                }
            }
            return newOffer;
        }
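        // Worked example for smartHaggle (just a trace of the loop above, not new behaviour):
        // smartHaggle("15789") first builds "15555", which is below the asking price, so the
        // repeated digit is bumped and the method returns "16666". In other words the first
        // digit is kept and the rest is a single repeated digit, the lowest such number at or
        // above the merchant's price, which is how a human restocker types an offer quickly.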
        /// <summary>
        /// This method buys an item from a shop.
        /// </summary>
        /// <param name="XY">The coordinates from the picture</param>
        /// <param name="offer"></param>
        /// <param name="referer"></param>
        /// <returns></returns>
        static public bool buy(string XY, string offer, string referer)
        {
            bool reponse = false;
            int lenght;
            String[] tmp = offer.Split(',');
            offer = "";
            for (int p = 0; p < tmp.Length; p++)
                offer += tmp[p];
            offer = smartHaggle(offer);
            lenght = XY.Length + offer.Length + 14; // current_offer=
            string data = "current_offer=" + offer + XY;

            HttpWebRequest request = (HttpWebRequest)WebRequest.Create("http://www.neopets.com/haggle.phtml");
            request.CookieContainer = _cookieJar;
            request.Method = "POST";
            request.UserAgent = "Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.9.0.1) Gecko/2008070208 Firefox/3.0.1";
            request.Accept = AcceptHeader.CAPCHA_SHOW;
            request.Headers.Add(HttpRequestHeader.AcceptLanguage, "en-us,en;q=0.5");
            request.Headers.Add(HttpRequestHeader.AcceptEncoding, "gzip,deflate");
            request.Headers.Add(HttpRequestHeader.AcceptCharset, "ISO-8859-1,utf-8;q=0.7,*;q=0.7");
            request.Headers.Add(HttpRequestHeader.KeepAlive, "300");
            request.ContentType = "application/x-www-form-urlencoded";
            request.KeepAlive = true;
            request.Referer = "http://www.neopets.com/" + referer;
            request.AllowAutoRedirect = false;
            request.Proxy = null;

            byte[] bytes = Encoding.ASCII.GetBytes(data);
            request.ContentLength = bytes.Length;
            Stream os = request.GetRequestStream();
            os.Write(bytes, 0, bytes.Length);
            os.Close();

            // After buying we are redirected to the shop from this url, for all shops
            _referer = "http://www.neopets.com/haggle.phtml";
            Console.WriteLine(_referer);

            // we get the response from the server
            HttpWebResponse WebResponse = (HttpWebResponse)request.GetResponse();
            // we turn the response into a Stream
            Stream responseStream = WebResponse.GetResponseStream();
            // if the Content-Encoding says gzip it's zipped, otherwise it's not
            if (WebResponse.ContentEncoding.ToLower().Contains("gzip"))
                responseStream = new GZipStream(responseStream, CompressionMode.Decompress);
            else if (WebResponse.ContentEncoding.ToLower().Contains("deflate"))
                responseStream = new DeflateStream(responseStream, CompressionMode.Decompress);
            // from the response stream we read the html with a StreamReader
            StreamReader Reader = new StreamReader(responseStream, Encoding.Default);
            string Html = Reader.ReadToEnd();

            if (Html.IndexOf("I accept your offer") != -1)
            {
                Console.WriteLine("Item bought !");
                reponse = true;
            }
            else
            {
                Console.WriteLine("Item missed !");
                reponse = false;
            }
            Reader.Close(); // and we close
            Reader = null;
            return reponse;
        }
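        // Note on the POST body built in buy() above: the code concatenates "current_offer=" + offer + XY,
        // so XY is assumed to arrive already url-encoded, e.g. "&x=107&y=63" (the click position on the
        // haggle captcha). Those x/y numbers are made up here purely for illustration.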
        static public void connecteToNeopet(string password, string username)
        {
            if (!_isLoggedIn)
            {
                /* CONSTRUCTION OF THE QUERY */
                _cookieJar = new CookieContainer();
                string data = "username=" + username + "&password=" + password + "&destination=%2Fpetcentral.phtml";
                int content_lenght = data.Length;

                HttpWebRequest req = (HttpWebRequest)WebRequest.Create("http://www.neopets.com/login.phtml");
                req.Method = "POST";
                req.UserAgent = "Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.9.0.1) Gecko/2008070208 Firefox/3.0.1";
                req.Headers.Add(HttpRequestHeader.AcceptLanguage, "en-us,en;q=0.5");
                req.Headers.Add(HttpRequestHeader.AcceptEncoding, "gzip,deflate");
                req.Headers.Add(HttpRequestHeader.AcceptCharset, "ISO-8859-1,utf-8;q=0.7,*;q=0.7");
                req.Headers.Add(HttpRequestHeader.KeepAlive, "300");
                req.KeepAlive = true;
                req.ContentType = "application/x-www-form-urlencoded";
                req.Referer = "http://www.neopets.com/hi.phtml";
                req.CookieContainer = _cookieJar;
                req.ContentLength = content_lenght;
                req.AllowAutoRedirect = false;
                req.Proxy = null;
                req.Accept = "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5";
                _referer = "http://www.neopets.com/hi.phtml"; // still hi.phtml, yes
                Console.WriteLine(_referer);

                /* ENCODING POST DATA */
                byte[] bytes = System.Text.Encoding.ASCII.GetBytes(data);
                Stream os = req.GetRequestStream();
                os.Write(bytes, 0, bytes.Length);
                os.Close();

                /* GETTING THE ANSWER */
                HttpWebResponse resp = (HttpWebResponse)req.GetResponse();
                // we turn the response into a Stream
                Stream responseStream = resp.GetResponseStream();
                bool connected = false;
                foreach (Cookie cook in resp.Cookies)
                {
                    _cookieJar.Add(cook);
                    if (cook.Name.Contains("neologin"))
                        connected = true;
                }

                Thread downloadAll = new Thread(new ThreadStart(downloadIndex));
                downloadAll.Start();

                /* ANALYSING RESULTS */
                // I guess if we got the neologin cookie it means we are in ^_^
                if (connected)
                {
                    Console.WriteLine("Connection successful\r\n");
                }
                else
                {
                    Console.WriteLine("You don't have the correct password/username");
                }
                _isLoggedIn = true;
            }
            else
            {
                logOut();
                connecteToNeopet(password, username);
                _isLoggedIn = true;
            }
        }

        /// <summary>
        /// This method logs out of Neopets.
        /// </summary>
        static public void logOut()
        {
            /* CONSTRUCTION OF THE QUERY TO LOG OUT */
            HttpWebRequest request = HTTPqueryMaker("http://www.neopets.com/logout.phtml");
            _cookieJar = null;
            _referer = "http://www.neopets.com/hi.phtml";

            /* GETTING THE ANSWER */
            HttpWebResponse resp = (HttpWebResponse)request.GetResponse();
            // we turn the response into a Stream
            Stream responseStream = resp.GetResponseStream();
            // if the Content-Encoding says gzip it's zipped, otherwise it's not
            if (resp.ContentEncoding.ToLower().Contains("gzip"))
                responseStream = new GZipStream(responseStream, CompressionMode.Decompress);
            else if (resp.ContentEncoding.ToLower().Contains("deflate"))
                responseStream = new DeflateStream(responseStream, CompressionMode.Decompress);
            resp.Close();
            responseStream.Close();
        }

        static private void downloadIndex()
        {
            /* CONSTRUCTION OF THE QUERY: GO TO THE INDEX http://www.neopets.com/index.phtml */
            // _referer = "http://www.neopets.com/hi.phtml";
            HttpWebRequest request = HTTPqueryMaker("http://www.neopets.com/index.phtml");
            // Here we set the referer, ready for the next request
            _referer = "http://www.neopets.com/index.phtml";

            /* GETTING THE ANSWER */
            HttpWebResponse resp = (HttpWebResponse)request.GetResponse();
            // we turn the response into a Stream
            Stream responseStream = resp.GetResponseStream();
            // if the Content-Encoding says gzip it's zipped, otherwise it's not
            if (resp.ContentEncoding.ToLower().Contains("gzip"))
                responseStream = new GZipStream(responseStream, CompressionMode.Decompress);
            else if (resp.ContentEncoding.ToLower().Contains("deflate"))
                responseStream = new DeflateStream(responseStream, CompressionMode.Decompress);
            // from the response stream we could read the html with a StreamReader
            // StreamReader Reader = new StreamReader(responseStream, Encoding.Default);
            // string Html = Reader.ReadToEnd();
            responseStream.Close();
            resp.Close();
        }

        /// <summary>
        /// This class is for the downlaodRessources thread, so we can pass it an argument.
        /// </summary>
        private class downlaodItems
        {
            string rawHTML;

            public downlaodItems(string rawHTML)
            {
                this.rawHTML = rawHTML;
            }

            public void download()
            {
                downlaodRessources(rawHTML);
            }
        }
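        // Side note (an alternative, not what this class does): Thread also accepts a
        // ParameterizedThreadStart delegate, so a method taking an object parameter could receive
        // the raw HTML through Thread.Start(html) without this helper class; the helper simply
        // keeps the argument strongly typed.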
        /// <summary>
        /// Returns an html page and downloads all the components it needs.
        /// </summary>
        /// <param name="url">URL of the html page</param>
        /// <returns>html page as a string</returns>
        static public string downloadPage(string url)
        {
            /* CONSTRUCTION OF THE QUERY */
            HttpWebRequest request = HTTPqueryMaker(url);

            /* GETTING THE ANSWER */
            HttpWebResponse resp = (HttpWebResponse)request.GetResponse();
            // we turn the response into a Stream
            Stream responseStream = resp.GetResponseStream();
            // if the Content-Encoding says gzip it's zipped, otherwise it's not
            if (resp.ContentEncoding.ToLower().Contains("gzip"))
                responseStream = new GZipStream(responseStream, CompressionMode.Decompress);
            else if (resp.ContentEncoding.ToLower().Contains("deflate"))
                responseStream = new DeflateStream(responseStream, CompressionMode.Decompress);
            // from the response stream we read the html with a StreamReader
            StreamReader Reader = new StreamReader(responseStream, Encoding.Default);
            string Html = Reader.ReadToEnd();
            //Console.WriteLine(Html);

            //*** Here we download all the files that Firefox usually downloads ***//
            // I created a class so we can call a thread with params
            // and left the unimportant downloads in a thread
            downlaodItems dl = new downlaodItems(Html);
            Thread downloadAll = new Thread(new ThreadStart(dl.download));
            downloadAll.Start();

            responseStream.Close();
            resp.Close();
            return Html;
        }

        /// <summary>
        /// Safely downloads the haggle image; you need to find the image's url and referer on your own.
        /// </summary>
        /// <param name="urlImage">
        /// Should look like this:
        /// http://www.neopets.com/captcha_show.phtml?_x_pwned=f199f0ffb046abaf7bba1ed20b231e98
        /// Using http://www.neopets.com/captcha_show.phtml alone is not recommended.
        /// </param>
        /// <param name="referer">
        /// Should look like this:
        /// http://www.neopets.com/haggle.phtml?obj_info_id=7432&stock_id=891236737&brr=1366
        /// </param>
        /// <returns></returns>
        static public Bitmap downloadImage(string urlImage, string referer)
        {
            try
            {
                _referer = referer;
                HttpWebRequest request = (HttpWebRequest)WebRequest.Create(urlImage);
                request.CookieContainer = _cookieJar;
                request.Method = "GET";
                request.UserAgent = "Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.9.0.1) Gecko/2008070208 Firefox/3.0.1";
                request.Accept = "image/png,image/*;q=0.8,*/*;q=0.5,en;q=0.3";
                request.Headers.Add(HttpRequestHeader.AcceptLanguage, "en-us,en;q=0.5");
                request.Headers.Add(HttpRequestHeader.AcceptEncoding, "gzip,deflate");
                request.Headers.Add(HttpRequestHeader.AcceptCharset, "ISO-8859-1,utf-8;q=0.7,*;q=0.7");
                request.KeepAlive = true;
                request.Proxy = null;
                request.Referer = _referer;
                request.AllowAutoRedirect = false;
                Console.WriteLine("download : " + urlImage + " with " + _referer + " as referer..");

                HttpWebResponse response = (HttpWebResponse)request.GetResponse();
                Stream responseStream = response.GetResponseStream();
                if (response.ContentEncoding.ToLower().Contains("gzip"))
                    responseStream = new GZipStream(responseStream, CompressionMode.Decompress);
                else if (response.ContentEncoding.ToLower().Contains("deflate"))
                    responseStream = new DeflateStream(responseStream, CompressionMode.Decompress);
                return new Bitmap(responseStream);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
                return null;
            }
        }
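        // Side note (an alternative, not what this class does): the src="" scan in downlaodRessources
        // below could also be written with System.Text.RegularExpressions, e.g.
        //   Regex.Matches(rawHTML, "src=\"(.*?)\"");
        // the manual IndexOf loop used here just avoids pulling in another namespace.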
        /// <summary>
        /// Downloads all the ressources that Firefox usually downloads for one html document.
        /// </summary>
        /// <param name="rawHTML"></param>
        static private void downlaodRessources(string rawHTML)
        {
            int begin = 0;
            int end = 0;
            int numberOfRessources = countOccurence(rawHTML, "src=\"");
            string ressourceLink = "";
            for (int i = 0; i < numberOfRessources; i++)
            {
                begin = rawHTML.IndexOf("src=\"", begin) + 5; // src="http://images.neopets.com/n.js"
                end = rawHTML.IndexOf("\"", begin);
                int lenght = end - begin;
                ressourceLink = rawHTML.Substring(begin, lenght);

                if (ressourceLink.Contains(".jpg"))
                    downloadRessource(ressourceLink, AcceptHeader.JPG);
                if (ressourceLink.Contains(".png"))
                    downloadRessource(ressourceLink, AcceptHeader.PNG);
                if (ressourceLink.Contains(".swf"))
                    downloadRessource(ressourceLink, AcceptHeader.APP_X_SWF);
                if (ressourceLink.Contains(".html"))
                    downloadHTML(ressourceLink); // html is a special case
                if (ressourceLink.Contains(".css"))
                    downloadRessource(ressourceLink, AcceptHeader.CSS);
                if (ressourceLink.Contains(".js"))
                    downloadRessource(ressourceLink, AcceptHeader.JS);
                if (ressourceLink.Contains(".gif"))
                    downloadRessource(ressourceLink, AcceptHeader.GIF);
                if (ressourceLink.Contains(".icon"))
                    Console.WriteLine("ICON FOUND !!!"); // debug
            }
        }

        /// <summary>
        /// Counts the number of times a string pattern appears in an html file.
        /// </summary>
        /// <param name="html"></param>
        /// <returns></returns>
        static private int countOccurence(string html, string patern)
        {
            int index = 0;
            int count = 0;
            while (html.IndexOf(patern, index) != -1)
            {
                // we add patern.Length so we don't loop forever
                index = html.IndexOf(patern, index) + patern.Length;
                count++;
            }
            return count;
        }

        /// <summary>
        /// Downloads any file: JPG/PNG/ICON/SWF/CSS/JS/GIF.
        /// </summary>
        /// <param name="url"></param>
        /// <param name="accept"></param>
        static private void downloadRessource(string url, string accept)
        {
            /* CONSTRUCTION OF THE QUERY */
            // Here we check that it's an http url,
            // otherwise the request will throw exceptions
            if (!url.ToLower().Contains("http"))
                return;
            string temporaryReferer = _referer;
            HttpWebRequest request = HTTPqueryMaker(url);
            _referer = temporaryReferer; // we don't want to change the referer when we download ressources

            /* GETTING THE ANSWER */
            HttpWebResponse resp = (HttpWebResponse)request.GetResponse();
            // we turn the response into a Stream
            Stream responseStream = resp.GetResponseStream();
            // if the Content-Encoding says gzip it's zipped, otherwise it's not
            if (resp.ContentEncoding.ToLower().Contains("gzip"))
                responseStream = new GZipStream(responseStream, CompressionMode.Decompress);
            else if (resp.ContentEncoding.ToLower().Contains("deflate"))
                responseStream = new DeflateStream(responseStream, CompressionMode.Decompress);
            responseStream.Close();
            resp.Close();
            // We don't really want to use what we downloaded here, it's just for show.
        }
        /// <summary>
        /// Only downloads an html document, just to fake being a true Firefox browser,
        /// but does not return the html document like its public counterpart downloadPage does.
        /// This is a useful method.
        /// </summary>
        /// <param name="url"></param>
        static private void downloadHTML(string url)
        {
            /* CONSTRUCTION OF THE QUERY */
            HttpWebRequest request = HTTPqueryMaker(url);
            HttpWebResponse resp = (HttpWebResponse)request.GetResponse();
            // we turn the response into a Stream
            Stream responseStream = resp.GetResponseStream();
            // if the Content-Encoding says gzip it's zipped, otherwise it's not
            if (resp.ContentEncoding.ToLower().Contains("gzip"))
                responseStream = new GZipStream(responseStream, CompressionMode.Decompress);
            else if (resp.ContentEncoding.ToLower().Contains("deflate"))
                responseStream = new DeflateStream(responseStream, CompressionMode.Decompress);
            // from the response stream we read the html with a StreamReader
            StreamReader Reader = new StreamReader(responseStream, Encoding.Default);
            string Html = Reader.ReadToEnd();
            resp.Close();
            responseStream.Close();

            downlaodItems dl = new downlaodItems(Html);
            Thread downloadAll = new Thread(new ThreadStart(dl.download));
            downloadAll.Start();
        }

        /// <summary>
        /// This method creates a HttpWebRequest object from a url.
        /// </summary>
        /// <param name="url"></param>
        /// <returns></returns>
        static private HttpWebRequest HTTPqueryMaker(string url)
        {
            HttpWebRequest request = (HttpWebRequest)WebRequest.Create(url);
            request.Method = "GET";
            request.UserAgent = "Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.9.0.1) Gecko/2008070208 Firefox/3.0.1";
            request.Accept = AcceptHeader.HTML;
            request.Headers.Add(HttpRequestHeader.AcceptLanguage, "en-us,en;q=0.5");
            request.Headers.Add(HttpRequestHeader.AcceptEncoding, "gzip,deflate");
            request.Headers.Add(HttpRequestHeader.AcceptCharset, "ISO-8859-1,utf-8;q=0.7,*;q=0.7");
            request.Headers.Add(HttpRequestHeader.KeepAlive, "300");
            request.KeepAlive = true;
            request.Referer = _referer;
            request.CookieContainer = _cookieJar;
            request.Proxy = null;
            request.AllowAutoRedirect = false;
            Console.WriteLine("download : " + url + " with " + _referer + " as referer..");
            _referer = url;
            return request;
        }

        /// <summary>
        /// Returns the current session via a CookieContainer.
        /// </summary>
        /// <returns></returns>
        static public CookieContainer getCookies()
        {
            return _cookieJar;
        }
    }
}

I would like to hear your comments and ideas on this wrapper. Thanks in advance.
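For anyone who wants to try the class, here is a minimal usage sketch. The credentials, shop URL, captcha URLs, click coordinates and price below are all placeholder values for illustration, not real data.

Code (Text):
using System;
using System.Drawing;

namespace AutoBuyer
{
    class Demo
    {
        static void Main()
        {
            // 1. Log in (note the parameter order of the class: password first, then username)
            WrapperHTTP.connecteToNeopet("myPassword", "myUsername");

            // 2. Browse to a shop page like a normal Firefox user would (placeholder shop URL)
            string shopHtml = WrapperHTTP.downloadPage("http://www.neopets.com/objects.phtml?type=shop&obj_type=1");

            // 3. Grab the haggle captcha; both URLs have to be parsed out of the shop/haggle html yourself
            Bitmap captcha = WrapperHTTP.downloadImage(
                "http://www.neopets.com/captcha_show.phtml?_x_pwned=PLACEHOLDER",
                "http://www.neopets.com/haggle.phtml?obj_info_id=0&stock_id=0&brr=0");

            // 4. Buy: XY is the click position on the captcha, offer is the asking price shown in the shop
            bool bought = WrapperHTTP.buy("&x=107&y=63", "15,789", "haggle.phtml?obj_info_id=0&stock_id=0&brr=0");
            Console.WriteLine(bought ? "bought" : "missed");

            // 5. Log out when done
            WrapperHTTP.logOut();
        }
    }
}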
Why don't you just make a function to connect to the internet? Try a syntax like this:

Code (Text):
static private void GETRequest(string url, string referer)
static private void POSTRequest(string url, string data, string referer)
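For example, roughly like this (only a sketch, written as if it lived inside your WrapperHTTP class so it can reuse _cookieJar and the same Firefox headers; I made them return the html instead of void so the caller can parse the answer, and readBody is a hypothetical helper):

Code (Text):
static private string GETRequest(string url, string referer)
{
    HttpWebRequest req = (HttpWebRequest)WebRequest.Create(url);
    req.Method = "GET";
    req.UserAgent = "Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.9.0.1) Gecko/2008070208 Firefox/3.0.1";
    req.Accept = AcceptHeader.HTML;
    req.Headers.Add(HttpRequestHeader.AcceptEncoding, "gzip,deflate");
    req.Referer = referer;
    req.CookieContainer = _cookieJar;
    req.AllowAutoRedirect = false;
    req.Proxy = null;
    return readBody((HttpWebResponse)req.GetResponse());
}

static private string POSTRequest(string url, string data, string referer)
{
    HttpWebRequest req = (HttpWebRequest)WebRequest.Create(url);
    req.Method = "POST";
    req.ContentType = "application/x-www-form-urlencoded";
    req.UserAgent = "Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.9.0.1) Gecko/2008070208 Firefox/3.0.1";
    req.Accept = AcceptHeader.HTML;
    req.Headers.Add(HttpRequestHeader.AcceptEncoding, "gzip,deflate");
    req.Referer = referer;
    req.CookieContainer = _cookieJar;
    req.AllowAutoRedirect = false;
    req.Proxy = null;

    // write the form data into the request body
    byte[] bytes = Encoding.ASCII.GetBytes(data);
    req.ContentLength = bytes.Length;
    using (Stream os = req.GetRequestStream())
        os.Write(bytes, 0, bytes.Length);

    return readBody((HttpWebResponse)req.GetResponse());
}

// hypothetical shared helper: decompress gzip/deflate and read the html
static private string readBody(HttpWebResponse resp)
{
    Stream s = resp.GetResponseStream();
    if (resp.ContentEncoding.ToLower().Contains("gzip"))
        s = new GZipStream(s, CompressionMode.Decompress);
    else if (resp.ContentEncoding.ToLower().Contains("deflate"))
        s = new DeflateStream(s, CompressionMode.Decompress);
    using (StreamReader reader = new StreamReader(s, Encoding.Default))
    {
        string html = reader.ReadToEnd();
        resp.Close();
        return html;
    }
}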
Hmm, I won't do that, because people make mistakes with referers and that makes the program easily detectable by TNT. I use the syntax

Code (Text):
static public string downloadPage(string url)

because my wrapper manages the referer on its own. As for POST, on Neopets the only two POSTs I needed were in connect (to get your cookie) and in buy (to buy an item from a shop). Of course, if you want to use it for an application that buys/sells stocks, you'll have to write a buy function for the stock market, but you already have all the examples you need and would probably just change the url and the post data. I'll agree, though, that a POST helper could be useful for other operations. Except that I would use the following syntax

Code (Text):
static private void POSTRequest(string url, string data)

again for the same reason: to force people down the same path as a true Firefox user, and to be sure the referers are managed correctly.
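If I add it, it would look roughly like this (a sketch only: it reuses the class's _referer and _cookieJar so the caller never touches the referer, and I return the html so the result can still be checked, the same way buy() checks for "I accept your offer"):

Code (Text):
static private string POSTRequest(string url, string data)
{
    HttpWebRequest req = (HttpWebRequest)WebRequest.Create(url);
    req.Method = "POST";
    req.ContentType = "application/x-www-form-urlencoded";
    req.UserAgent = "Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.9.0.1) Gecko/2008070208 Firefox/3.0.1";
    req.Accept = AcceptHeader.HTML;
    req.Headers.Add(HttpRequestHeader.AcceptLanguage, "en-us,en;q=0.5");
    req.Headers.Add(HttpRequestHeader.AcceptEncoding, "gzip,deflate");
    req.Headers.Add(HttpRequestHeader.AcceptCharset, "ISO-8859-1,utf-8;q=0.7,*;q=0.7");
    req.KeepAlive = true;
    req.CookieContainer = _cookieJar;
    req.Referer = _referer;          // the wrapper decides the referer, not the caller
    req.AllowAutoRedirect = false;
    req.Proxy = null;

    // write the form data into the request body
    byte[] bytes = Encoding.ASCII.GetBytes(data);
    req.ContentLength = bytes.Length;
    using (Stream os = req.GetRequestStream())
        os.Write(bytes, 0, bytes.Length);

    _referer = url;                  // the next request will use this page as its referer

    HttpWebResponse resp = (HttpWebResponse)req.GetResponse();
    Stream s = resp.GetResponseStream();
    if (resp.ContentEncoding.ToLower().Contains("gzip"))
        s = new GZipStream(s, CompressionMode.Decompress);
    else if (resp.ContentEncoding.ToLower().Contains("deflate"))
        s = new DeflateStream(s, CompressionMode.Decompress);
    using (StreamReader reader = new StreamReader(s, Encoding.Default))
    {
        string html = reader.ReadToEnd();
        resp.Close();
        return html;
    }
}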