Friday, October 8, 2010

Install.bat Part 2

Install.bat for VS 2008

A ready-to-use install.bat. The xcopy line is written out with fully qualified paths because the relative version was not working properly in VS 2008.

@SET TEMPLATEDIR="c:\program files\common files\microsoft shared\web server extensions\12\Template"
@SET STSADM="c:\program files\common files\microsoft shared\web server extensions\12\bin\stsadm"
@SET GACUTIL="C:\Program Files\Microsoft SDKs\Windows\v6.0A\bin\GacUtil.exe"

Echo Copying files to TEMPLATE directory
xcopy "C:\Partha\Devarea\MyFeatureReceiver\MyFeatureReceiver1\MyFeatureReceiver1\TEMPLATE\*" "c:\program files\common files\microsoft shared\web server extensions\12\Template" /e /y

REM Echo Installing feature
REM %STSADM% -o InstallFeature -filename MyFeatureReceivers\feature.xml -force

REM Echo Uninstalling the DLL from GAC
REM %GACUTIL% /u MyFeatureReceiver1

Echo Installing the DLL in GAC
Echo %GACUTIL%
%GACUTIL% /i "C:\Partha\Devarea\MyFeatureReceiver\MyFeatureReceiver1\MyFeatureReceiver1\bin\Debug\MyFeatureReceivers.dll"

REM Echo Restart IIS Worker Process IISRESET
REM IISRESET


http://blah.winsmarts.com/2008-7-Authoring_SharePoint_2007_Workflows_using_VS2008.aspx
http://msdn.microsoft.com/en-us/library/bb466224(office.12).aspx

Install.bat - With Application pool Recycle in Visual Studio 2008

ECHO Installation in progress.........

@SET TEMPLATEDIR="C:\Program Files\Common Files\Microsoft Shared\web server extensions\12\TEMPLATE"
@SET STSADM="C:\program files\common files\microsoft shared\web server extensions\12\bin\stsadm"
@SET GACUTIL="C:\Program Files\Microsoft SDKs\Windows\v6.0A\bin\gacutil.exe"
@SET APPPOL="C:\WINDOWS\system32\cscript.exe"
@SET IISVBS="C:\WINDOWS\system32\iisapp.vbs"

ECHO Copying files..........
rem xcopy /e /y TEMPLATE\* "C:\Program Files\Common Files\Microsoft Shared\web server extensions\12\TEMPLATE"
xcopy /e /y TEMPLATE\* %TEMPLATEDIR%
ECHO Uninstalling the DLL from GAC
%GACUTIL% /u SimpleCustomApplicationPage1

ECHO Installing the DLL in GAC
%GACUTIL% /i "E:\Partha Projects\SP2007\Custome Application Pages\SimpleCustomApplicationPage1\SimpleCustomApplicationPage1\bin\Debug\SimpleCustomApplicationPage1.dll"

REM If we need to restart only the AppPool then
REM SET /P AppPoolName=[Please Enter the application pool name]
REM %APPPOL% %IISVBS% /a %AppPoolName% /r

ECHO Restart IIS Application Pool - Worker Process IISRESET
%APPPOL% %IISVBS% /a "SharePoint - 7070" /r

Another variant, this one also installing the features with STSADM:
@echo off
ECHO Installation in progress.........

@SET TEMPLATEDIR="C:\Program Files\Common Files\Microsoft Shared\web server extensions\12\TEMPLATE"
@SET STSADM="C:\program files\common files\microsoft shared\web server extensions\12\bin\stsadm"
@SET GACUTIL="C:\Program Files\Microsoft SDKs\Windows\v6.0A\bin\gacutil.exe"
@SET APPPOL="C:\WINDOWS\system32\cscript.exe"
@SET IISVBS="C:\WINDOWS\system32\iisapp.vbs"

ECHO Copying files..........
xcopy /e /y TEMPLATE\* %TEMPLATEDIR%
ECHO Uninstalling the DLL from GAC
%GACUTIL% /u MultiLingualListDefinition

ECHO Installing the DLL in GAC
%GACUTIL% /i "E:\Partha Projects\SP2007\CustomListDefinition\MultiLingualListDefinition\bin\Debug\MultiLingualListDefinition.dll"

Echo Installing Feature.......
%STSADM% -o installfeature -filename MLContentType\feature.xml -force
%STSADM% -o installfeature -filename MLStringListDefinition\feature.xml -force

REM Echo Activating Features...........
REM %STSADM% -o activatefeature -filename MLContentType\feature.xml
REM %STSADM% -o activatefeature -filename MLStringListDefinition\feature.xml


REM If we need to restart only the AppPool then
REM SET /P AppPoolName=[Please Enter the application pool name]
REM %APPPOL% %IISVBS% /a %AppPoolName% /r

ECHO Restart IIS Application Pool - Worker Process IISRESET
%APPPOL% %IISVBS% /a "SharePoint - 7070" /r

CabLib – Error running MakeCab.exe

CabLib – Error running MakeCab.exe
Could not load file or assembly 'CabLib, Version=6.9.26.0, Culture=neutral, PublicKeyToken=85376ef9a48d191a' or one of its dependencies
I was trying to create a solution package for SharePoint, so I planned to use MakeCab.exe to create the CAB file. Oops, I got this error while running makecab.exe /f MyAppPackage.ddf:
Building the solution - please wait
Saving the Manifest.xml file
Creating the WSP file
Could not load file or assembly 'CabLib, Version=6.9.26.0, Culture=neutral, PublicKeyToken=85376ef9a48d191a' or one of its dependencies. An attempt was made to load a program with an incorrect format. It seemed that the DLL could not be loaded at all.
The same error can also occur when running wspbuilder.exe.
Investigation:
1. CabLib.dll was already in the same directory as wspbuilder.exe.
2. cabinet.dll exists in the windows\system32 directory.
3. CabLib.dll is a native C++ library, so you have to use the version that matches your 32-bit or 64-bit Windows server.
Solution:
I went with one very simple solution, the one I normally fall back on whenever I get stuck with a problem like this: put the DLL in the GAC. I found the DLL at this location:
C:\Program Files\WSPTools\WSPBuilderExtensions\Resources\x86\CabLib.dll
I dropped it into the assembly folder (i.e. the GAC) and it worked. That's all.
HaPpY CoDiNg………….
Partha (Aurum)

Wednesday, July 28, 2010

Upgrade to SharePoint 2010 from MOSS 2007 – Considerations for Services

Considerations for specific services

The following services were available in Office SharePoint Server 2007 and can be upgraded to SharePoint Server 2010. Because the services infrastructure has changed, you need to consider additional factors when planning and performing an upgrade when these services are present in the environment.


 

Personalization Services

Profiles

  • Services: In SharePoint Server 2010, two services are now used for user profiles and taxonomy information: the User Profile service and the Managed Metadata service.
    • During in-place upgrade, these two services are automatically enabled and configured.
    • If you are using database attach upgrade, enable and configure the Managed Metadata service before you upgrade the User Profile service.
  • Databases:
    • During in-place upgrade, user profile data from Office SharePoint Server 2007 is upgraded from the SSP database into a new UserProfile database.
    • Any taxonomy data is upgraded, and you can copy the taxonomy data into a Taxonomy database for use by the Managed Metadata service after upgrade has been completed.
    • During a database attach upgrade, user profile and taxonomy data from the SSP database is upgraded when the SSP database is attached, but the database is not copied and renamed. You can copy the taxonomy data into a Taxonomy database for use by the Managed Metadata service after upgrade has been completed by using the Move-SPProfileManagedMetadataProperty Windows PowerShell cmdlet.
  • Persisted properties that relate to the profiles (such as the MySite Host URL) are preserved during an in-place upgrade, but they are not upgraded when you use database attach because they are stored in the configuration database, not the SSP database.
  • Any scheduled timer jobs will need to be reconfigured after upgrade. During upgrade, they are set back to the default times. Be sure to record your timer job schedules before upgrade so you can reapply the times.


 

My Sites

  • Upgrade the My Site host at the same time that you upgrade the profile services. You do not need to upgrade the My Sites themselves at the same time.
  • For best results, upgrade My Sites (or at least the My Site host) at the same time as your main intranet site.


 

Office SharePoint Server Search

  • In SharePoint Server 2010, the Search service uses three databases:
    • Search administration database (new): contains Search administration settings that were stored in the SSP database in Office SharePoint Server 2007.
    • Search Service Crawl database (new): contains crawl history information that was stored in the SSP database in SharePoint Server 2007.
    • Search metadata database (reused Search database): contains the metadata for search.
  • You cannot upgrade Search data by using the database attach approach for upgrading. If you are using database attach upgrade, you must configure Search in your new farm separately from (that is, either before or after) upgrading your other content.
  • If you are using the in-place upgrade approach, you should review and adjust your Search topology after upgrade to suit the new recommendations and requirements. For more information, see the Search models.


 

InfoPath Forms Services

  • When you upgrade by using the database attach upgrade approach, you must export any administrator-deployed form templates (XSN files) and data connection files (UDCX files) before migrating (for example, by using the Export-SPInfoPathAdministrationFiles Windows PowerShell cmdlet) and then import them to the new farm after migrating. If the URL of the new server is different from the URL of the previous server, you can run the Update-SPInfoPathAdminFileUrl Windows PowerShell cmdlet to update links that are used in the upgraded form templates.
  • If you are running Office Forms Server 2007, you can upgrade to SharePoint Server 2010, but you must use the database attach upgrade approach.


 


 

Excel Services


 

  • Excel Services remains a local service for SharePoint Server 2010. This means that you must run the service in the same farm that consumes it.
  • For in-place upgrade, any configuration information stored in the SSP database for Excel Services is upgraded and moved into the configuration database.
  • For the database attach upgrade approach, you will have to reconfigure Excel Services in your new farm.
  • After upgrade (either in-place or database attach), you will have to provision a new unattended service account by using the Secure Store Service to use with Excel Services.


 

Business Data Catalog

  • During an in-place upgrade, data that was stored in the SSP database is moved and upgraded to a separate database. New service applications are created for the new version of the BDC service. A new service, the Application Registry Backwards-compatible service, is used to manage the old BDC connections.
  • The Business Data Catalog is not upgraded when you use the database attach upgrade approach.


 

Single Sign-On


 

  • The Single Sign-On (SSO) service is being replaced with the Secure Store Service in SharePoint Server 2010. To upgrade application definitions from SSO to the Secure Store Service, use the following Windows PowerShell cmdlets:

    Upgrade-SPSingleSignOnDatabase

    Upgrade-SSOConnectionString

    Upgrade-SecureStoreConnectionString

    Upgrade-SecureStorePassphrase

  • Note that passwords are not upgraded. After you upgrade the application definitions, you can make the Secure Store Service the default SSO provider.


 

In-place upgrade with services


 

When you perform an in-place upgrade, all of your services infrastructure and the settings for the services themselves are upgraded as part of the process.


 

Shared Services Providers

  • Shared Services Providers (SSPs) are converted to service applications and service application proxies, one per service. They are given default names (for example, if the SSP was named "SharedServices1", the service applications will be named "SharedServices1_service", as in SharedServices1_Search).
  • All SSPs that are upgraded retain their associations with the Web applications that consumed from that SSP.
  • All SSP administrators are added to Central Administration as delegated administrators.


 

Databases

  • The SSP database is upgraded, and data is copied into new user profiles and taxonomy databases. Other services information is moved into other service databases or the configuration database.


 

Sites

  • The SSP Admin site is upgraded as a mostly blank site except for the BDC profile pages. The site can be deleted after upgrade if it is not needed for BDC pages.


 

Before upgrade

  • Collect any settings that must be reapplied, such as scheduled timer job settings.
  • Review your services architecture and determine what, if any, changes to make after upgrade.


     

During upgrade (single SSP)

If you have a single SSP, all proxies for service applications are added to the default proxy group.


 

After upgrade

Configure new and upgraded services

Many new services are available in SharePoint Server 2010. You can enable these new services after you perform an in-place upgrade.

  • You must create service applications to host any new services. You can use the farm configuration wizard to quickly select and enable several new services in your farm, or you can configure the services manually.
  • You can also add proxies for any service applications that you want to use with different Web applications.

For Profile Services, upgrade any taxonomy data manually:

  • Use the Move-SPProfileManagedMetadataProperty Windows PowerShell cmdlet to upgrade profile taxonomy data to the Taxonomy database and reconnect the data to the Managed Metadata and User Profiles service applications.
  • The User Profiles service proxy and Managed Metadata service proxy must be in the same proxy group to upgrade and use the data.


 

For Excel Services, provision a new unattended service account for the Secure Store Service.


 

For BDC, consider migrating the BDC profile pages to a new location.


 

Database attach upgrade with services

Most services settings will need to be reconfigured when you upgrade via database attach. When you move your databases to a new farm and upgrade the content, you must create your services infrastructure in the new farm and configure the services appropriately for your new farm and new version. You can attach the SSP databases from your old farm, but only the profile information in that database is upgraded – any search information or other services settings are not upgraded. You cannot upgrade Search databases by using the database attach upgrade approach.


 

If you are using the database attach approach for upgrading to SharePoint Server 2010, there are several steps to perform before, during, and after the upgrade to successfully reconfigure the services infrastructure.

Before upgrade

  • When you configure the new farm, you must also configure the new service applications and service application proxies for the farm, and configure the settings for all services that you want to use.
  • If you are using Profile Services, you must also configure the Managed Metadata service to be able to upgrade any taxonomy data from the SSP database.
  • For Forms Services, export any administrator-deployed form templates (XSN files) and data connection files (UDCX files) from your Office SharePoint Server 2007 farm by using the following command: Stsadm.exe -o exportipfsadminobjects -filename <path to export CAB>.
  • For Forms Services, import any administrator-deployed form templates and data connection files to your new farm before you attach the content databases. Use the Import-SPIPAdministrationFiles Windows PowerShell cmdlet to import the forms.

During upgrade

  • When you attach and upgrade the content databases, you also attach and upgrade the SSP database, which upgrades the profile information in the database.
  • Upgrade the My Site host at the same time as the User Profiles data (or else set the My Site host and all My Sites to read-only). You do not need to upgrade the My Sites themselves, just the My Site host.

After upgrade

  • Reapply administrator permissions for services. By default, farm administrators have permissions to all services when you perform a database attach upgrade.
  • For Excel Services, you must provision a new unattended service account by using the Secure Store Service to use with Excel Services.
  • For InfoPath Forms Services, update any links that are used in the upgraded form templates by using the Update-SPInfoPathAdminFileURL Windows PowerShell cmdlet.
  • For Profile Services, upgrade any taxonomy data. You use the Move-SPProfileManagedMetadataProperty Windows PowerShell cmdlet to upgrade profile taxonomy data manually to the Taxonomy database and reconnect the data to the Managed Metadata and User Profiles service applications. The User Profiles service and Managed Metadata service must be in the same proxy group to upgrade and use the data.
  • For BDC, consider migrating the BDC profile pages to a new location.

Tuesday, July 20, 2010

SharePoint Architecture on IIS 6

The primary goal of this chapter is for you to understand the internal workings of a Web application and how it initializes the WSS runtime environment.


 

IIS Web Sites and Virtual Directories

Both ASP.NET and WSS rely on IIS 6.0 to supply the underlying listening mechanism to process incoming HTTP requests and supply a management infrastructure for launching and running worker processes on the Web server. An IIS Web site provides an entry point into the IIS Web server infrastructure. Each IIS Web site is configured to listen for and process incoming HTTP requests that meet certain criteria. For example, an IIS Web site can be configured to handle requests coming in over a specific IP address or port number or can be routed to the Web server by using a specific host header.


 

IIS automatically creates and configures an IIS Web site named Default Web Site that listens for HTTP requests coming in over port 80 on any of the IP addresses supported on the local Web server. Each IIS Web site is configured to map to a root directory, which is a physical directory on the file system of the hosting Web server. For example, standard configuration for IIS maps the Default Web Site to a root directory located at C:\Inetpub\wwwroot. In the most straightforward routing scenarios, IIS maps incoming HTTP requests to physical files inside the root directory. For example, IIS will respond to a request for http://www.MyTestApp.com/page1.htm by simply loading the contents of the file located at c:\Inetpub\wwwroot\MyTestApp\page1.htm into memory and streaming it back to the client.


 

Note that IIS tracks configuration information about its IIS Web sites and virtual directories in a repository known as the IIS metabase. The IIS metabase lives on the file system of each front-end Web server running IIS. For example, when you create and configure an IIS Web site using the IIS administration utility, IIS tracks these changes by writing entries to the local IIS metabase.


 

ISAPI Extensions and ISAPI Filters


 

IIS also supports the Internet Server Application Programming Interface (ISAPI) programming model, which provides the opportunity for more sophisticated routing scenarios. In particular, the ISAPI programming model allows you to configure an IIS Web site or virtual directory so that incoming requests trigger the execution of custom code on the Web server. The ISAPI programming model consists of two primary component types: ISAPI extensions and ISAPI filters.


 

An ISAPI extension is a component DLL that plays the role of an endpoint for an incoming request. The fundamental concept is that IIS can map incoming requests to a set of endpoints that trigger the execution of code within an ISAPI extension DLL. An ISAPI extension DLL must be installed on the Web server and configured at the level of either an IIS Web site or virtual directory. Configuration commonly involves associating specific file extensions with the ISAPI extensions by using an IIS application map.


 

While an ISAPI extension serves as an endpoint, an ISAPI filter plays the role of an interceptor. An ISAPI filter is installed and configured at the level of the IIS Web site. Once installed, an ISAPI filter intercepts all incoming requests targeting that IIS Web site. The fundamental concept is that an ISAPI filter can provide pre-processing and post-processing for each and every incoming request. ISAPI filters are typically created to provide low-level functionality for an IIS Web site, such as custom authentication and request logging.


 

Application Pools and the IIS Worker Process


 

IIS provides a flexible infrastructure for managing worker processes by using application pools. An application pool is a configurable entity that allows you to control how IIS maps IIS Web sites and virtual directories to instances of the IIS worker process. Note that instances of the IIS worker process are launched using an executable named w3wp.exe. The routing architecture of IIS is controlled by a kernel-level device driver named http.sys. This device driver listens for incoming HTTP requests and uses information in the IIS metabase to route them to whatever instance of w3wp.exe is associated with the target application pool. If http.sys determines that the target application pool doesn't have a running instance of w3wp.exe, it launches a new instance on demand to process the request.


 

Each IIS Web site and virtual directory can be configured to run in its own isolated application pool. Conversely, you can configure many different IIS Web sites and virtual directories to run in the same application pool for greater efficiency. The key observation you should make is that a tradeoff exists between isolation and efficiency. To achieve greater isolation means you must run more instances of w3wp.exe, which compromises efficiency. To achieve greater efficiency means you must map multiple IIS Web sites and virtual directories to fewer instances of the IIS worker process, which compromises isolation.


 

Every application pool has an important setting known as the application pool identity. The application pool identity is configured with a specific Windows user account that is either a local account on the Web server or a domain account within an Active Directory directory service domain. When http.sys launches a new instance of w3wp.exe for a specific application pool, it uses the application pool identity to initialize a Windows security token that serves as the process token. This is important because it establishes the "runs as" identity for code that runs within the IIS worker process.


 

By default, IIS uses the identity of the local Network Service account when you create a new application pool. However, you can configure the application pool identity by using any other user account you like. When deploying Web sites based on ASP.NET and WSS, it is recommended to configure the application pool identity with a domain account instead of the Network Service account. This is especially true in a Web farm environment when you need to synchronize the identity of an application pool across multiple front-end Web servers in the farm.


 

ASP.NET 2.0 Framework

The ASP.NET Framework is implemented as an ISAPI extension named aspnet_isapi.dll. The basic configuration for ASP.NET involves registering application maps for common ASP.NET file extensions including .aspx, .ascx, .ashx, and .asmx at the level of an IIS Web site or virtual directory. When IIS sees an incoming request targeting a file with one of these extensions, it forwards the request to aspnet_isapi.dll, which effectively passes control over to the ASP.NET Framework. The way in which the ASP.NET Framework processes a request largely depends on the extension of the target file. It is important to note that the ASP.NET Framework runs each ASP.NET application with a certain level of isolation. This is true even in a scenario when you have configured multiple ASP.NET applications to run within the same IIS application pool. The ASP.NET Framework provides isolation between ASP.NET applications running inside the same instance of the IIS worker process by loading each of them into a separate .NET Framework AppDomain.
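As a quick way to see that isolation, the following page sketch (the class name is just a placeholder) reports the same worker process ID but a different AppDomain name when it is dropped into two ASP.NET applications that share a single application pool.

// AppDomainInfoPage.cs - a hypothetical page used only to illustrate the isolation
// described above: applications sharing an application pool run in the same
// w3wp.exe process but in separate .NET AppDomains.
using System;
using System.Diagnostics;
using System.Web.UI;

public class AppDomainInfoPage : Page
{
    protected override void OnLoad(EventArgs e)
    {
        base.OnLoad(e);
        // Same process ID for every application hosted in the pool...
        Response.Write("Process ID: " + Process.GetCurrentProcess().Id + "<br />");
        // ...but a distinct AppDomain per ASP.NET application.
        Response.Write("AppDomain: " + AppDomain.CurrentDomain.FriendlyName);
    }
}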


 

HTTP Request Pipeline

Underneath the productivity-centered architecture for pages and server-side controls, the ASP.NET Framework exposes the HTTP Request Pipeline for developers who like to work at a lower level. It provides the developer with a degree of control comparable with the ISAPI programming model. However, when you create a component for the HTTP Request Pipeline, you are able to write your code in a managed language such as C#. You can also use APIs provided by the ASP.NET Framework, which is much easier than using the ISAPI programming model.


 

The HTTP Request Pipeline has three replaceable component types: HttpHandler, HttpApplication, and HttpModule. As requests come in, they are queued up and assigned to a worker thread that then processes the request by interacting with each of these component types.


 

The ultimate destination of any request is the endpoint, which is modeled in the HTTP Request Pipeline by using an HttpHandler class, which implements the IHttpHandler interface. As a developer, you can create a custom HttpHandler component and plug it into the HTTP Request Pipeline by adding configuration elements to the web.config file.
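For illustration, here is a minimal HttpHandler sketch; the class name and response text are placeholders, not anything defined by ASP.NET or WSS. It would be registered under <httpHandlers> in web.config with a verb and a path, much like the WSS entry shown later in this section.

// A minimal custom HttpHandler sketch. It acts as the endpoint for whatever
// verb/path combination is mapped to it in the <httpHandlers> section of web.config.
using System.Web;

public class HelloHttpHandler : IHttpHandler
{
    // No per-request state is kept, so a single instance can safely be reused.
    public bool IsReusable
    {
        get { return true; }
    }

    public void ProcessRequest(HttpContext context)
    {
        context.Response.ContentType = "text/plain";
        context.Response.Write("Handled by HelloHttpHandler: " + context.Request.RawUrl);
    }
}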


 

The HTTP Request Pipeline places an HttpApplication component in front of the HttpHandler. On an application-wide basis, incoming requests are always routed through the HttpApplication before they reach the target HttpHandler, thus giving the HttpApplication the ability to preprocess any request no matter which HttpHandler it is being routed to. This preprocessing stage is handled through a series of events that are defined inside the HttpApplication class such as BeginRequest, AuthenticateRequest, and AuthorizeRequest.


 

In situations when you don't want to use a custom HttpApplication component, the ASP.NET Framework initializes the HTTP Request Pipeline with a standard HttpApplication object that provides default behavior. However, you can replace this standard component by creating a file named global.asax and placing it in the root directory of the hosting ASP.NET application. For example, you can create a global.asax that looks like the following:

<%@ Application Language="C#" %>

<script runat="server">

protected void Application_AuthenticateRequest(object sender, EventArgs e) {

// your code goes here for request authentication

}

protected void Application_AuthorizeRequest(object sender, EventArgs e) {

// your code goes here for request authorization

}

</script>


 

The third replaceable component type in the HTTP Request Pipeline is the HttpModule. The HttpModule is similar to the HttpApplication component in that it is designed to handle events defined by the HttpApplication class and is processed before control is passed to any HttpHandler classes. For example, you can create a custom HttpModule component to handle request-level events such as BeginRequest, AuthenticateRequest, and AuthorizeRequest. As with the HttpHandler, an HttpModule class is defined with an interface. You can create a class that implements the IHttpModule interface and plug it into the HTTP Request Pipeline by adding configuration elements to the web.config file.


 

Whereas custom HttpApplication components can be defined as simple text files with an .asax extension, custom HttpModule components are always compiled as classes within assembly DLLs. To add a custom HttpModule component into the HTTP Request Pipeline, you then add entries into a web.config file.
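As a concrete example, a custom HttpModule might look like the following sketch; the class name and the response header it writes are only illustrative. After compiling it into an assembly, you would register it with an <add> entry under <httpModules> in web.config, as just described.

// A minimal custom HttpModule sketch. In Init it subscribes to events defined by
// the HttpApplication class; here it stamps a response header during BeginRequest.
using System;
using System.Web;

public class SampleHttpModule : IHttpModule
{
    public void Init(HttpApplication application)
    {
        // Hook the application-level events you want to pre-process.
        application.BeginRequest += new EventHandler(OnBeginRequest);
    }

    private void OnBeginRequest(object sender, EventArgs e)
    {
        HttpApplication application = (HttpApplication)sender;
        application.Context.Response.AppendHeader("X-Sample-Module", "BeginRequest handled");
    }

    public void Dispose()
    {
        // Nothing to release in this sketch.
    }
}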


 

While an HttpApplication component and an HttpModule component are similar in what they do, the HttpModule contains a few noteworthy differences. First, you are not limited to one HttpModule per application as you are with the HttpApplication components. The web.config file for an ASP.NET application can add in several different HttpModule components. Second, HttpModule components can be configured at the machine level. In fact, the ASP.NET Framework ships with several different HttpModule components that are automatically configured at the machine level to provide ASP.NET functionality for things such as Windows authentication, Forms authentication, and output caching.

The final component that we want to discuss with respect to the HTTP Request Pipeline is HttpContext. As ASP.NET initializes a request to send to the HTTP Request Pipeline, it creates an object from the HttpContext class and initializes it with important contextual information. From a timing perspective, it's important to see that ASP.NET creates this object before any custom code inside the HTTP Request Pipeline has a chance to begin execution. This means that you can always program against the HttpContext object and the child objects that it contains, such as Request, User, and Response. Whenever you are authoring a component that is to execute within the HTTP Request Pipeline, you can write code that looks like the following:


 

HttpContext currentContext = HttpContext.Current;

string incomingUrl = currentContext.Request.Url.ToString();

string currentUser = currentContext.User.Identity.Name;

currentContext.Response.Write("Hello world");


 


 

WSS Integration with ASP.NET


 

WSS integrates with ASP.NET at the level of the IIS Web site. Each IIS Web site in which you intend to host WSS sites must go through a one-time transformation process in which it is configured to become what WSS terminology refers to as a Web application. This transformation process involves adding IIS metabase entries and a WSS-specific web.config file to the root directory of the hosting IIS Web site. Once the transformation is completed, WSS extends the routing architecture of IIS and ASP.NET to properly route all incoming requests through the WSS runtime.


 

WSS must add an IIS application map to each Web application to ensure that each and every incoming request is initially routed to the ASP.NET runtime. Remember that the default configuration for ASP.NET only registers application maps for requests with well-known ASP.NET file extensions such as .aspx, .ascx, .ashx, and .asmx. Therefore, WSS configures the hosting IIS Web site with a wildcard application map to route all incoming requests to aspnet_isapi.dll, including those requests with non-ASP.NET extensions such as .doc, .docx, and .pdf.


 

Because every request targeting a Web application is routed through aspnet_isapi.dll, the request gets fully initialized with ASP.NET context. Furthermore, its processing behavior can be controlled by using a custom HttpApplication object and adding configuration elements to the web.config file. The WSS team uses standard ASP.NET techniques to extend the HTTP Request Pipeline by using several custom components.


 

First, you can see that WSS configures each Web application with a custom HttpApplication object by using the SPHttpApplication class. Note that this class is deployed in the WSS system assembly Microsoft.SharePoint.dll. WSS integrates this custom application class by creating a custom global.asax file at the root of the Web application that inherits from SPHttpApplication.


 

<%@ Application Inherits="Microsoft.SharePoint.ApplicationRuntime.SPHttpApplication" %>


 

In addition to including a custom HttpApplication object, the WSS architecture uses a custom HttpHandler and a custom HttpModule. These two WSS-specific components are integrated into the HTTP Request Pipeline for a Web application using standard entries in the web.config file.


 

Examine the following XML fragment that is taken from the standard web.config file used by a WSS 3.0 Web application.


 

<configuration>

<system.web>

<httpHandlers>

<remove verb="GET,HEAD,POST" path="*" />

<add verb="GET,HEAD,POST" path="*" type="Microsoft.SharePoint.ApplicationRuntime.SPHttpHandler, ..." />

</httpHandlers>

<httpModules>

<clear />

<add name="SPRequest"

type="Microsoft.SharePoint.ApplicationRuntime.SPRequestModule, ..." />

<!-- other standard ASP.NET httpModules added back in -->

</httpModules>

</system.web>

</configuration>


 

The WSS team members have created their own HttpModule named SPRequestModule to initialize various aspects of the WSS runtime environment. You can see that the standard WSS web.config file configures SPRequestModule so that it is the first HttpModule to respond to application-level events in the HTTP Request Pipeline of ASP.NET. If you examine the web.config file for a WSS Web application, you will see that WSS adds back in several of the standard HttpModule components from the ASP.NET Framework that deal with things such as output caching and various types of authentication.


 

The standard WSS web.config file also registers an HttpHandler named SPHttpHandler and configures it with a path of "*". This allows WSS to provide the SPHttpHandler class as a single endpoint for all incoming requests.


 

As you can see, the architecture of WSS is made possible through extending the HTTP Request Pipeline. This allows WSS to fully leverage the underlying capabilities of the ASP.NET Framework while also taking over control of each and every request that targets a Web application.

….. HaPpY CoDiNg

Partha(Aurum)


Monday, July 19, 2010

A Simple Feature and Feature Receiver (Event handler to a Feature)

Creating Feature:

Let's start off by creating a new Class Library DLL project named HelloWorld. We are going to create a C# project in our example. Eventually, we will add code that will be compiled into the output DLL for the feature's event handlers. Before creating the feature.xml file, consider that the files for this feature must be deployed in their own special directory inside the WSS system directory named FEATURES. The FEATURES directory is located inside another WSS system directory named TEMPLATE.


 

c:\Program Files\Common Files\Microsoft Shared\web server extensions\12\TEMPLATE\FEATURES


 

Given the requirements of feature deployment, it makes sense to create a parallel hierarchy of folders within a Visual Studio project used to develop a WSS feature. This will make it easier to copy the feature files to the correct location and test them as we do our development work. Start by adding a folder named TEMPLATE to the root directory of the current project. Once you have created the TEMPLATE directory, create another directory inside that named FEATURES. Finally, create another directory inside the FEATURES directory using the same name as the name of the feature project. In this case the name of this directory is HelloWorld. Next, create an XML file named feature.xml inside the HelloWorld directory. Add the following XML to the feature.xml file to add a top-level Feature element along with attributes that define the feature itself.


 

<Feature

Id="B2CB42E2-4F0A-4380-AABA-1EF9CD526F20"

Title="Hello World Feature"

Description="This is my very first custom feature"

Scope="Web"

Hidden="FALSE"

ImageUrl="menuprofile.gif"

xmlns="http://schemas.microsoft.com/sharepoint/">

<ElementManifests>

<ElementManifest Location="elements.xml" />

</ElementManifests>

</Feature>


 

You see that a feature is defined using a Feature element containing attributes such as Id, Title, Description, Version, Scope, Hidden, and ImageUrl. You must create a new GUID for the Id attribute so that your feature can be uniquely identified. You create the feature's Title and Description attributes using user-friendly text. These attributes will be shown directly to the users on the WSS administrative pages used to activate and deactivate features. The Scope defines the context in which the feature can be activated and deactivated. The feature we are creating has a scope equal to Web, which means it can be activated and deactivated within the context of the site. If you assign a Scope value of Site, your feature will then be activated and deactivated within the scope of a site collection. The two other possible scopes for defining a feature are WebApplication scope and Farm scope. As you can see, the Hidden attribute has a value of FALSE. This means that, once installed within the farm, our feature can be seen by users who might want to activate it. You can also create a feature where the Hidden attribute has a value of TRUE. This has the effect of hiding the feature in the list of available features shown to users. Hidden features must be activated from the command line, through custom code, or through an activation dependency with another feature. You will also notice that the ImageUrl attribute has a value that points to one of the graphic images that is part of the basic WSS installation. This image will be shown next to the feature in the user interface. The last part of the feature.xml file shown previously is the ElementManifests element. This element contains inner ElementManifest elements that reference other XML files where you will define the elements that make up the feature. In our case, there is a single ElementManifest element that uses the Location attribute to point to a file named elements.xml.


 

Now it's time to create the elements.xml file and define a single CustomAction element that will be used to add a simple menu command to the Site Actions menu. Add the following XML, which defines a CustomAction element, to elements.xml.


 

<Elements xmlns="http://schemas.microsoft.com/sharepoint/">

<CustomAction

Id="SiteActionsToolbar"

GroupId="SiteActions"

Location="Microsoft.SharePoint.StandardMenu"

Sequence="100"

Title="Hello World"

Description="A custom menu item added using a feature"

ImageUrl="_layouts/images/menuprofile.gif" >

<UrlAction Url="http://msdn.microsoft.com"/>

</CustomAction>

</Elements>


 

This CustomAction element adds a menu command to the Site Actions menu. It provides a user-friendly Title and Description as well as a URL that will be used to redirect the user when the menu command is selected. While this example of a feature with a single element does not go very far into what can be done with features, it provides us with a simple starting point for going through the steps of installing and testing a feature. Now that we have created the feature.xml file and the elements.xml file to define the HelloWorld feature, there are three steps involved in installing it for testing purposes. First, you must copy the HelloWorld feature directory to the WSS system FEATURES directory. Second, you must run an STSADM.EXE operation to install the feature with WSS. Finally, you must activate the feature inside the context of a WSS site. You can automate the first two steps by creating a batch file named install.bat at the root directory of the HelloWorld project and adding the following command line instructions.


 

REM – Remember to remove line breaks from first two lines

@SET TEMPLATEDIR="c:\program files\common files\microsoft shared\web server extensions\12\Template"

@SET STSADM="c:\program files\common files\microsoft shared\web server extensions\12\bin\stsadm"

Echo Copying files

xcopy /e /y TEMPLATE\* %TEMPLATEDIR%

Echo Installing feature

%STSADM% -o InstallFeature -filename HelloWorld\feature.xml -force

Echo Restart IIS Worker Process

IISRESET


 

Actually, you can also automate the final step of activating the feature within a specific site by running the ActivateFeature operation with the STSADM utility. Once you have added the install.bat file, you can configure Visual Studio to run it each time you rebuild the HelloWorld project by going to the Build Events tab within the Project Properties and adding the following post-build event command line instructions.


 

cd $(ProjectDir)

Install.bat


 

The first line with cd $(ProjectDir) is required to change the current directory to that of the project directory. The second line runs the batch file to copy the feature files to the correct location and install the feature with the InstallFeature operation of the command-line STSADM.EXE utility. Once the feature has been properly installed, you should be able to activate it within the context of a site, such as the top-level site of the site collection you created earlier in this chapter.
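As a side note, the activation step can also be scripted from code through the WSS object model instead of STSADM. The following console-application sketch assumes a test site at http://localhost (a placeholder URL) and uses the feature Id from the feature.xml shown earlier.

// Minimal sketch: activate the HelloWorld feature on a site from a console app.
// The URL is a placeholder; the GUID matches the Id attribute in feature.xml.
using System;
using Microsoft.SharePoint;

class ActivateHelloWorld
{
    static void Main()
    {
        Guid featureId = new Guid("B2CB42E2-4F0A-4380-AABA-1EF9CD526F20");
        using (SPSite siteCollection = new SPSite("http://localhost"))
        using (SPWeb site = siteCollection.OpenWeb())
        {
            // Adding the feature Id to the web's Features collection activates it;
            // 'true' forces reactivation if it is already active.
            site.Features.Add(featureId, true);
        }
    }
}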


 

Adding an Event Handler to a Feature

Now it's time to take the example of the HelloWorld feature a little further by adding event handlers and programming against the WSS object model. First, start by adding a project reference to Microsoft.SharePoint.dll. Next, locate the source file named Class1.cs and rename it to FeatureReceiver.cs. Next, add the following code.


 

using System;

using Microsoft.SharePoint;


 

namespace HelloWorld{


 

public class FeatureReceiver : SPFeatureReceiver

{

public override void FeatureInstalled(SPFeatureReceiverProperties properties){}

public override void FeatureUninstalling(SPFeatureReceiverProperties properties) { }

public override void FeatureActivated(SPFeatureReceiverProperties properties)

{

SPWeb site = (SPWeb)properties.Feature.Parent;

// track original site Title using SPWeb property bag

site.Properties["OriginalTitle"] = site.Title;

site.Properties.Update();

// update site title

site.Title = "Hello World";

site.Update();

}

public override void FeatureDeactivating(SPFeatureReceiverProperties properties)

{

// reset site Title back to its original value

SPWeb site = (SPWeb)properties.Feature.Parent;

site.Title = site.Properties["OriginalTitle"];

site.Update();

} } }


 

The first thing you should notice is how you create an event handler that fires when a feature is activated or deactivated. You do this by creating a class that inherits from the SPFeatureReceiver class. As you can see, you handle events by overriding virtual methods in the base class such as FeatureActivated and FeatureDeactivating. There are also two other event handlers that fire when a feature is installed or uninstalled, but we are not going to use them in this introductory example.


 

The FeatureActivated method has been written to update the title of the current site using the WSS object model. Note the technique used to obtain a reference to the current site–the properties parameter is used to acquire a reference to an SPWeb object. The properties parameter is based on the SPFeatureReceiverProperties class that exposes a Feature property that, in turn, exposes a Parent property that holds a reference to the current site. The site title is changed by assigning a new value to the Title property of the SPWeb object and then calling the Update method. Also note that this feature has been designed to store the original value of the site Title so that it can be restored whenever the feature is deactivated. This is accomplished by using a persistent property bag scoped to the site that is accessible through an SPWeb object's Properties collection. Note that many of the objects in the WSS object model have a similar Properties property, which can be used to track name-value pairs using a persistent property bag. WSS handles persisting these named value pairs to the content database and retrieving them on demand.

Now that we have written the code for the feature's two event handlers, it's time to think about what's required to deploy the HelloWorld.dll assembly. The first thing to consider is that this assembly DLL must be deployed in the Global Assembly Cache (GAC), which means you must add a key file to the project in order to sign the resulting output DLL during compilation with a strong name. Once you have added the key file and configured the HelloWorld project to build HelloWorld.dll with a strong name, you can also add another instruction line to the post-build event command line to install (or overwrite) the assembly in the GAC each time you build the current project. The command line instructions for the post-build event should now look like this:


 

"%programfiles%\Microsoft Visual Studio 8\SDK\v2.0\Bin\gacutil.exe" -if $(TargetPath)

cd $(ProjectDir)

Install.bat


 

The next step is to update the feature.xml file with two new attributes so that WSS knows that there are event handlers that should be fired whenever the feature is activated or deactivated. This can be accomplished by adding the ReceiverAssembly attribute and the ReceiverClass attribute, as shown here.


 

<Feature

Id="B2CB42E2-4F0A-4380-AABA-1EF9CD526F20"

Title="Hello World Feature"

Description="This is my very first custom feature"

Version="1.0.0.0"

Scope="Web"

Hidden="FALSE"

ImageUrl="menuprofile.gif"

ReceiverAssembly="HelloWorld, Version=1.0.0.0, Culture=neutral,

PublicKeyToken=b59ad8f489c4a334"

ReceiverClass="HelloWorld.FeatureReciever"

xmlns="http://schemas.microsoft.com/sharepoint/">

<ElementManifests>

<ElementManifest Location="elements.xml" />

</ElementManifests>

</Feature>


 

The ReceiverAssembly attribute should contain the four-part name of an assembly that has already been installed in the GAC. The ReceiverClass attribute should contain the namespace-qualified name of a public class within the receiver assembly that inherits SPFeatureReceiver. Once you have made these changes to the feature.xml file, you should be able to test your work. When you rebuild the HelloWorld project, Visual Studio should run the install.bat file to copy the updated version of the feature.xml file to the WSS FEATURES directory and to install the updated version of feature.xml with WSS. The build process should also compile HelloWorld.dll with a strong name and install it in the GAC. Note that you will likely be required to run an IISRESET command to restart the IIS worker process. This is due to the fact that features and assemblies loaded from the GAC are cached by WSS within the IIS worker process.
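If you are unsure of the exact four-part name to use, one simple way to see it (a small hypothetical helper, not part of the feature project itself) is to load the compiled assembly and print its FullName:

// Prints the four-part assembly name, for example:
// "HelloWorld, Version=1.0.0.0, Culture=neutral, PublicKeyToken=b59ad8f489c4a334"
using System;
using System.Reflection;

class PrintAssemblyName
{
    static void Main()
    {
        Assembly assembly = Assembly.LoadFrom(@"bin\Debug\HelloWorld.dll");
        Console.WriteLine(assembly.FullName);
    }
}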

At this point, you should be able to test your work by activating and deactivating the feature within the context of a WSS site. When you activate the feature, it should change the Title of the site to "Hello World." When you deactivate the feature, it should restore the Title of the site to the original value.


 

….. HaPpY CoDiNg

Partha(Aurum)

Wednesday, July 14, 2010

ASP.NET 2.0 Web Part Vs SharePoint based Web Part

A nice comparison between an ASP.NET 2.0 custom Web Part and a SharePoint-based Web Part.

When to use an ASP.NET 2.0 Web Part (System.Web.UI.WebControls.WebParts.WebPart):

  • For most business needs.
  • To distribute your Web Part to sites that run ASP.NET 2.0 or SharePoint sites.
  • When you want to reuse one or more Web Parts created for ASP.NET 2.0 sites on SharePoint sites.

When to use a SharePoint-based Web Part (Microsoft.SharePoint.WebPartPages.WebPart):

  • To use data or functionality provided by Windows SharePoint Services 3.0. For example, you are creating a Web Part that works with site or list data.
  • When you want to migrate a set of Web Parts using the SharePoint-based Web Part infrastructure to Windows SharePoint Services 3.0.
  • To create cross-page connections.
  • To create connections between Web Parts that are outside of a Web Part zone.
  • To work with client-side connections (Web Part Page Services Component).
  • To use a data-caching infrastructure that allows caching to the content database.


 

Windows SharePoint Services (WSS) V3.0 Web Part Infrastructure

The WSS V3.0 Web Part infrastructure uses much of the ASP.NET 2.0 Web Part control set. The infrastructure also creates its own controls by inheriting from the base classes provided by the ASP.NET 2.0 Web Part control set.

There are three controls that make up the core web part infrastructure in WSS V3.0. They are SPWebPartManager, WebPartZone and WebPart.
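To make the relationship between the two frameworks concrete, here is a minimal sketch of a Web Part that derives from the ASP.NET 2.0 base class; the class name and the rendered text are placeholders. Because the WSS V3.0 infrastructure builds on this same base class, a part like this can be used on ASP.NET 2.0 pages as well as on SharePoint pages.

// A minimal ASP.NET 2.0 Web Part sketch.
using System.Web.UI;
using System.Web.UI.WebControls.WebParts;

public class HelloWebPart : WebPart
{
    public HelloWebPart()
    {
        // Title shown in the Web Part chrome.
        this.Title = "Hello Web Part";
    }

    protected override void RenderContents(HtmlTextWriter writer)
    {
        writer.Write("Hello from an ASP.NET 2.0 Web Part.");
    }
}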

Conclusion

Developers now have two Web Part frameworks to work with. It is important to understand the difference between them and to choose the appropriate framework to deliver the desired functionality.

…… HaPpY CoDiNg

Partha (Aurum)

Thursday, July 8, 2010

Creating Dynamic DataList control in C#/ASP.Net

Creating Dynamic DataList control in C#/ASP.Net

In short, this can be done by implementing the ITemplate interface from the System.Web.UI namespace.

In this article, I will explain how to construct a dynamic DataList control by building dynamic template columns. I will create dynamic template columns for the DataList control based on the result set returned by a stored procedure. Template columns for a DataList can be created by implementing the ITemplate interface in the System.Web.UI namespace; refer to the MyTemplate class below.

Steps for creating Template class

1. Create a template class (the MyTemplate class shown below) that implements the ITemplate interface.

2. Create a constructor for the class that takes a ListItemType argument so that we can determine whether we are constructing the HeaderTemplate, ItemTemplate, or FooterTemplate. Alternatively, the ListItemType can be exposed as a public property.

3. Implement the InstantiateIn(Control container) method and construct the corresponding template based on the value passed to the constructor. Refer to the MyTemplate class below for the details.

4. For each child control that requires data binding, attach a DataBinding event handler to populate the control with data. This event is raised after the template items have been created along with all of their controls.

All four steps are implemented in the MyTemplate class shown below.

Constructing Dynamic DataList

The DataList will have a header template that displays the category name. For example, if it is displaying ASP.NET articles, the category name will be ASP.NET. The articles are displayed in the item template: for every article, we display the title as a hyperlink that links to the original URL of the article, together with a description and the author name. The footer shows the number of articles in this category.



protected void Page_Load(object sender, EventArgs e)
{
    DataList dl1 = new DataList();
    DataTable dt = new DataTable();
    dt.Columns.Add("Links");
    DataRow dr;
    for (int i = 1; i < 20; i++)
    {
        dr = dt.NewRow();
        dr["Links"] = "A" + i;
        dt.Rows.Add(dr);
        dt.AcceptChanges();
    }
    dl1.RepeatDirection = RepeatDirection.Horizontal;
    dl1.RepeatColumns = 10;
    if (dt.Rows.Count > 0)
    {
        dl1.HeaderTemplate = new MyTemplate(ListItemType.Header);
        dl1.FooterTemplate = new MyTemplate(ListItemType.Footer);
        dl1.ItemTemplate = new MyTemplate(ListItemType.Item);
        dl1.SelectedItemTemplate = new MyTemplate(ListItemType.SelectedItem);
        dl1.Width = Unit.Percentage(100);
        dl1.GridLines = GridLines.Both;
        dl1.DataSource = dt;
        dl1.DataBind();
        // pnl is a Panel control declared on the page; it hosts the dynamic DataList.
        pnl.Controls.Add(dl1);
    }
}

The MyTemplate class – this implements ITemplate and performs the data binding for the template content

using System;
using System.Web.UI;
using System.Web.UI.WebControls;

public class MyTemplate : ITemplate
{
    ListItemType ItemType;

    public MyTemplate(ListItemType _ItemType)
    {
        ItemType = _ItemType;
    }

    // This is the implementation of the ITemplate interface
    #region ITemplate Members
    public void InstantiateIn(Control container)
    {
        Literal lc = new Literal();
        LinkButton lb = new LinkButton();
        // Determine which template is being constructed (header, item, footer, etc.)
        switch (ItemType)
        {
            case ListItemType.Header:
                lc.Text = "<div id=\"nifty\" class=\"PostCategory\">Header</div>";
                break;
            case ListItemType.Item:
                lc.DataBinding += new EventHandler(TemplateControl_DataBinding);
                lb.DataBinding += new EventHandler(lb_DataBinding);
                break;
            case ListItemType.SelectedItem:
                lb.Text = "<div style='background-color:Red'><A href=DataListTest.aspx >Test Link</a></div> ";
                break;
            case ListItemType.Footer:
                lc.Text = "<div style=\"text-align:right\">Footer</div>";
                break;
        }
        // Add both controls to the container (the DataList item) so that the
        // header/footer Literal and the item LinkButton can render.
        container.Controls.Add(lc);
        container.Controls.Add(lb);
    }

    void lb_DataBinding(object sender, EventArgs e)
    {
        LinkButton lb = (LinkButton)sender;
        // The DataListItem that owns this control is reached through lb.NamingContainer
        DataListItem container = (DataListItem)lb.NamingContainer;
        lb.Text = "<div style='background-color:#FFFFCC'><A href=DataListTest.aspx?Val=" + DataBinder.Eval(container.DataItem, "Links") + ">" + DataBinder.Eval(container.DataItem, "Links") + " </a></div> ";
        lb.CommandName = "Select";
    }

    private void TemplateControl_DataBinding(object sender, System.EventArgs e)
    {
        Literal lc = (Literal)sender;
        DataListItem container = (DataListItem)lc.NamingContainer;
        lc.Text += "<div style='background-color:#FFFFCC'><A href=" + DataBinder.Eval(container.DataItem, "Links") + ">" + DataBinder.Eval(container.DataItem, "Links") + " </a></div> ";
    }
    #endregion
}

...HaPpY CoDiNg

Partha (Aurum)