Thursday, July 14, 2011

How to create new language codes that aren't available in Manage Website Languages

Sometimes the operating system doesn't have all the language codes that you need on the server where you run your EPiServer solution. One way to solve that is to create a console application which you run on the server, passing arguments for the new language code based on a language code that already exists on the server.

Create a console application and copy this code:

   1:  using System;
   2:  using System.Collections.Generic;
   3:  using System.Linq;
   4:  using System.Text;
   5:  using System.Globalization;
   6:   
   7:  namespace CreateLanguage
   8:  {    
   9:      public class CreateLanguage
  10:      { 
  11:          public CreateLanguage()
  12:          {
  13:          }
  14:   
  15:          public CreateLanguage(string inputLine)
  16:          {
  17:              SetLanguage(inputLine);
  18:          }
  19:   
  20:          private void SetLanguage(string inputLine)
  21:          {
  22:              if (!String.IsNullOrEmpty(inputLine))
  23:              {
  24:                  string[] LanguageCode = inputLine.Split('_');
  25:   
  26:                  string copyLanguageId = LanguageCode[0];
  27:                  string newLanguageId = LanguageCode[1];
  28:                  string newLanguageName = LanguageCode[2];
  29:   
  30:                  //* Get the base culture and region information
  31:                  CultureInfo cultureInfo = new CultureInfo(copyLanguageId.ToString());
  32:                  RegionInfo regionInfo = new RegionInfo(cultureInfo.Name);
  33:   
  34:                  //* Create a locale 
  35:                  CultureAndRegionInfoBuilder cultureAndRegionInfoBuilder = new CultureAndRegionInfoBuilder(newLanguageId.ToString(), CultureAndRegionModifiers.None);
  36:   
  37:                  //* Load the base culture and region information
  38:                  cultureAndRegionInfoBuilder.LoadDataFromCultureInfo(cultureInfo);
  39:                  cultureAndRegionInfoBuilder.LoadDataFromRegionInfo(regionInfo);
  40:   
  41:                  //* Set the culture name
  42:                  cultureAndRegionInfoBuilder.CultureEnglishName = newLanguageName.ToString();
  43:                  cultureAndRegionInfoBuilder.CultureNativeName = newLanguageName.ToString();
  44:   
  45:                  NumberFormatInfo nfi = cultureInfo.NumberFormat;
  46:                  
  47:                  cultureAndRegionInfoBuilder.NumberFormat = nfi;
  48:   
  49:                  //* Register with your operating system
  50:                  cultureAndRegionInfoBuilder.Register();
  51:              }
  52:          }
  53:      }
  54:  }

After compiling the code, open a command prompt with administrative privileges and type something like this: CreateLanguage.exe "en-EN" "en-RO" "English (România)"

Once you've run this program, fire up EPiServer in Admin mode, select 'Manage Web Site languages' from the Config tab, click 'Add language', and the new locale will be ready to use.

PS: If you run EPiServer CMS 6 you will hit a bug unless you write to EPiServer support and ask for a bug-fixed EPiServer.dll. On EPiServer CMS 5 this works just fine. :-)

Wednesday, July 13, 2011

Robots.txt enterprise version for EPiServer CMS

Adding a robots.txt file to a single site is easy. To do the same thing for an enterprise installation is a bit more work. The easiest way to solve the challenge is to add an HttpHandler that writes out the correct content based on the URL of the site. The example code below gets the content from files stored in /Global/Robots/ in the file manager, but you could easily change it to a different path.

Under the /Global/Robots folder you add files that are similar to this: 
  • mysite.com_robots.txt
  • mysite2.com_robots.txt
Also add a default_robots.txt to have a fallback version. Read here to get an idea of how to write the content of the robots.txt file: robotstxt.org

Create a class and add this code:

   1:  namespace Common.CoreLibrary.Robots
   2:  {
   3:      public class RobotsHandler : IHttpHandler
   4:      {
   5:   
   6:          public void ProcessRequest(HttpContext context)
   7:          {
   8:              string path = context.Request.Url.DnsSafeHost;
   9:   
  10:              if( path.Contains("www."))
  11:                  path = path.Replace("www.","");
  12:   
  13:              path = "/Global/Robots/" + path + "_robots.txt";
  14:              UnifiedFile file = HostingEnvironment.VirtualPathProvider.GetFile(path) as UnifiedFile;
  15:   
  16:              if (file != null)
  17:              {
  18:                  if (context.Request.ServerVariables["Https"] == "off")
  19:                  {
  20:                      //Read spesific file for each brand
  21:                      Outfile(context, file);
  22:                  }
  23:                  else
  24:                  {
  25:                      // HTTPS
  26:                      context.Response.ContentType = "text/plain";
  27:                      context.Response.Write("User-agent: *\n");
  28:                      context.Response.Write("Disallow: /");
  29:                  }
  30:              }
  31:              else
  32:              {
  33:                  //Backup file if not the spesific robots.txt exits.
  34:                  path = "/Global/Robots/default_robots.txt";
  35:                  file = HostingEnvironment.VirtualPathProvider.GetFile(path) as UnifiedFile;
  36:   
  37:                  if (file == null) return;
  38:   
  39:                  Outfile(context, file);
  40:              }
  41:          }
  42:   
  43:          private static void Outfile(HttpContext context, UnifiedFile file)
  44:          {
  45:              Stream sourceFile = file.Open(FileMode.Open, FileAccess.Read, FileShare.Read);
  46:              context.Response.ContentEncoding = Encoding.UTF8;
  47:              context.Response.ContentType = "text/plain";
  48:              //Write the file directly to the HTTP content output stream. 
  49:              long FileSize;
  50:              FileSize = sourceFile.Length;
  51:              byte[] getContent = new byte[(int)FileSize];
  52:              sourceFile.Read(getContent, 0, (int)sourceFile.Length);
  53:              sourceFile.Close();
  54:   
  55:              context.Response.BinaryWrite(getContent);
  56:              context.Response.End();
  57:          }
  58:   
  59:          #region IHttpHandler Members
  60:   
  61:          public bool IsReusable
  62:          {
  63:              get { return false; }
  64:          }
  65:   
  66:          #endregion
  67:   
  68:      }
  69:  }


Add the following line to the web.config after <system.webServer><handlers>:
<add name="Robots" path="/robots.txt" verb="*" type="Common.CoreLibrary.Robots.RobotsHandler" resourceType="Unspecified" requireAccess="Script" />

So the next time you request mysite.com/robots.txt or mysite2.com/robots.txt you should receive different content. :-)