I installed TFS2012 as a test system and am doing some tests before we go productive.
This included defining many build definitions, which was a lot of work.
Once the tests are successful, a new server will be installed with TFS2012 on it.
For this new server, which will then operate as the productive system, I would like to restore the build definitions from the test system. But only the build definitions, not the whole team collections, because I ran test check-ins and I don't want those on my productive server.
Now, is it possible to back up and restore build definitions only?
Maybe it is possible directly through the SQL database, but I am a little afraid of the references there pointing to other tables.
Best regards, Peter Bucher
Build definitions are not source controlled. The only option is relying on the TFS database backup, from which you can restore or view the tbl_BuildDefinition* tables in the Tfs_DefaultCollection database.
There is a UserVoice item for this feature, and you can also use the TFS API to do it.
Add a vote on UserVoice: provide a way to version-control build definitions
Using the TFS API: How can I copy a TFS 2010 Build Definition?
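For a quick sense of what the API route involves, here is a minimal sketch (the server URL and project name are placeholders) that just lists the existing build definitions, using the same calls the copy code below relies on:
using System;
using Microsoft.TeamFoundation.Build.Client;
using Microsoft.TeamFoundation.Client;

internal class ListBuildDefinitions
{
    private static void Main()
    {
        // Placeholders: point these at your own collection and team project.
        var tfs = TeamFoundationServerFactory.GetServer("http://tfs:8080/tfs/MyTeamCollection");
        var buildServer = (IBuildServer)tfs.GetService(typeof(IBuildServer));

        foreach (IBuildDefinition definition in buildServer.QueryBuildDefinitions("MyTeamProject"))
            Console.WriteLine(definition.Name);
    }
}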
In the end I decided not to touch the database, because there are references to a lot of other tables.
I used the TFS API v11 (TFS2012) and a bit of C# code, which I adapted to my needs from this base: How can I copy a TFS 2010 Build Definition?
It copies all build definitions from one TFS2012 server to another. For both servers you need to specify a team collection and a team project, so the copy has to be done per team project.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Microsoft.TeamFoundation.Build.Client;
using Microsoft.TeamFoundation.Client;
using Microsoft.TeamFoundation.VersionControl.Client;

namespace TFSBuildDefinitionCreator
{
    internal class Program
    {
        private static void Main(string[] args)
        {
            // Copies build definitions from one server to another.
            // Uses the TeamFoundation API V11 (TFS2012).
            // Code was used to copy build definitions from a test server to a productive one.
            string sourceServer = "http://testTfs:8080/tfs/MyTeamCollection";
            string sourceTeamProject = "MyTeamProject";

            string targetServer = "https://productiveTfs:8080/tfs/MyTeamCollection";
            string targetTeamProject = "MyTeamProject";

            // Drop location for the definitions: share on which the builds should be dropped.
            string defaultDropLocation = "\\\\MyBuildserver\\Builds$";

            // Change the default process template in the following method: GetDefaultProcessTemplateByServerPathFromBuildServer.
            CopyBuildDefinitions(sourceServer, sourceTeamProject, targetServer, targetTeamProject, defaultDropLocation);

            Console.Read();
        }

        private static IBuildServer GetBuildServerFromServerUrl(string serverUrl)
        {
            var tfs = TeamFoundationServerFactory.GetServer(serverUrl);
            return (IBuildServer)tfs.GetService(typeof(IBuildServer));
        }

        private static IBuildController GetDefaultBuildControllerFromBuildServer(IBuildServer buildServer)
        {
            return buildServer.QueryBuildControllers()[0];
        }

        private static IProcessTemplate GetDefaultProcessTemplateByServerPathFromBuildServer(IBuildServer buildServer, string teamProject)
        {
            var processTemplates = buildServer.QueryProcessTemplates(teamProject);
            var result = processTemplates.First(t => t.ServerPath.Contains("/BuildProcessTemplates/MyDefaultTemplate.xaml"));
            return result;
        }

        private static void CopyBuildDefinitions(string sourceServer, string sourceTeamProject, string targetServer,
                                                 string targetTeamProject, string defaultDropLocation)
        {
            var sourceBuildServer = GetBuildServerFromServerUrl(sourceServer);
            var sourceBuildDetails = sourceBuildServer.QueryBuildDefinitions(sourceTeamProject);

            foreach (var sourceBuildDetail in sourceBuildDetails)
            {
                CopyBuildDefinition(sourceBuildDetail, targetServer, targetTeamProject, defaultDropLocation);
            }
        }

        private static void CopyBuildDefinition(IBuildDefinition buildDefinition, string targetServer, string targetTeamProject, string defaultDropLocation)
        {
            var targetBuildServer = GetBuildServerFromServerUrl(targetServer);

            var buildDefinitionClone = targetBuildServer.CreateBuildDefinition(targetTeamProject);
            buildDefinitionClone.BuildController = GetDefaultBuildControllerFromBuildServer(targetBuildServer);
            buildDefinitionClone.ContinuousIntegrationType = buildDefinition.ContinuousIntegrationType;
            buildDefinitionClone.ContinuousIntegrationQuietPeriod = buildDefinition.ContinuousIntegrationQuietPeriod;

            // Still to change: use the passed-in drop location instead of the source one.
            //buildDefinitionClone.DefaultDropLocation = buildDefinition.DefaultDropLocation;
            buildDefinitionClone.DefaultDropLocation = defaultDropLocation;
            buildDefinitionClone.Description = buildDefinition.Description;
            buildDefinitionClone.Enabled = buildDefinition.Enabled;

            //buildDefinitionClone.Name = String.Format("Copy of {0}", buildDefinition.Name);
            buildDefinitionClone.Name = buildDefinition.Name;

            //buildDefinitionClone.Process = buildDefinition.Process;
            buildDefinitionClone.Process = GetDefaultProcessTemplateByServerPathFromBuildServer(targetBuildServer, targetTeamProject);
            buildDefinitionClone.ProcessParameters = buildDefinition.ProcessParameters;

            foreach (var schedule in buildDefinition.Schedules)
            {
                var newSchedule = buildDefinitionClone.AddSchedule();
                newSchedule.DaysToBuild = schedule.DaysToBuild;
                newSchedule.StartTime = schedule.StartTime;
                newSchedule.TimeZone = schedule.TimeZone;
            }

            foreach (var mapping in buildDefinition.Workspace.Mappings)
            {
                buildDefinitionClone.Workspace.AddMapping(
                    mapping.ServerItem, mapping.LocalItem, mapping.MappingType, mapping.Depth);
            }

            buildDefinitionClone.RetentionPolicyList.Clear();
            foreach (var policy in buildDefinition.RetentionPolicyList)
            {
                buildDefinitionClone.AddRetentionPolicy(
                    policy.BuildReason, policy.BuildStatus, policy.NumberToKeep, policy.DeleteOptions);
            }

            buildDefinitionClone.Save();
        }
    }
}
Hope that helps others.
I am currently developing a utility to help automate our report deployment process. Multiple files, in multiple folders, to multiple servers.
I am using the reportservice2010.asmx web service, and I am deploying my files to the server - so most of the way there.
My issue is that I have shared data sets and shared data sources, which are deployed to individual folders, separate from the report folders. When the deployment occurs, the web service looks locally for the data source rather than in the data source folder, giving an error like:
The dataset ‘CostReduction’ refers to the shared data source ‘CostReduction’, which is not
published on the report server. The shared data source ‘CostReduction’ must be published
before this report can run.
The data source/set has been deployed and the report functions correctly but I need to suppress these error messages as they may be hiding other actual errors.
I can hard-code a lookup that checks whether the data source/set exists and manually filter on that, but it seems very inefficient. Is there any way I can tell the web service where to look for these files, or is there another approach that other people have used?
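(As an aside, such a lookup need not be expensive. A minimal sketch, assuming the shared items live under a /DataSources folder as described: one ListChildren call per folder, cached into a set for cheap existence checks.)
using System.Collections.Generic;
using System.Linq;

// Hypothetical helper: list a folder once, then test existence in O(1).
static HashSet<string> GetPublishedItemNames(ReportingService2010 rs, string folder)
{
    // false = no recursion; a single web-service round trip per folder
    return new HashSet<string>(rs.ListChildren(folder, false).Select(item => item.Name));
}

// Usage:
// var dataSources = GetPublishedItemNames(rs, "/DataSources");
// bool ok = dataSources.Contains("CostReduction");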
I'm not looking at changing the reports so the data source is read from
/DataSources/DataSourceName
as there are lots of reports and that's not how our existing projects are configured.
Many thanks in advance.
I realize you are using VB, but perhaps this will give you a clue if you convert it from C# to VB using one of the translators on the web.
Hopefully this will give you a lead in the right direction.
When all the reports in a particular folder (referred to here as the 'parent folder') use the same shared data source, I use this to set all of them to the same shared data source (in this case "/DataSources/Shared_New"):
using GetPropertiesSample.ReportService2010;
using System.Diagnostics;
using System.Collections.Generic; //<== required for LISTS
using System.Reflection;

namespace GetPropertiesSample
{
    class Program
    {
        static void Main(string[] args)
        {
            GetListOfObjectsInGivenFolder_and_ResetTheReportDataSource("0_Contacts"); //<=== This is the parent folder
        }

        private static void GetListOfObjectsInGivenFolder_and_ResetTheReportDataSource(string sParentFolder)
        {
            // Create a Web service proxy object and set credentials
            ReportingService2010 rs = new ReportingService2010();
            rs.Credentials = System.Net.CredentialCache.DefaultCredentials;

            CatalogItem[] reportList = rs.ListChildren(@"/" + sParentFolder, true);

            int iCounter = 0;
            foreach (CatalogItem item in reportList)
            {
                iCounter += 1;
                Debug.Print(iCounter.ToString() + "]#########################################");
                if (item.TypeName == "Report")
                {
                    Debug.Print("Report: " + item.Name);
                    ResetTheDataSource_for_a_Report(item.Path, "/DataSources/Shared_New"); //<=== This is the DataSource that I want them to use
                }
            }
        }

        private static void ResetTheDataSource_for_a_Report(string sPathAndFileNameOfTheReport, string sPathAndFileNameForDataSource)
        {
            //from: http://stackoverflow.com/questions/13144604/ssrs-reportingservice2010-change-embedded-datasource-to-shared-datasource
            ReportingService2010 rs = new ReportingService2010();
            rs.Credentials = System.Net.CredentialCache.DefaultCredentials;

            string reportPathAndName = sPathAndFileNameOfTheReport;
            //example of sPathAndFileNameOfTheReport: "/0_Contacts/207_Practices_County_CareManager_Role_ContactInfo"
            List<ReportService2010.ItemReference> itemRefs = new List<ReportService2010.ItemReference>();
            ReportService2010.DataSource[] itemDataSources = rs.GetItemDataSources(reportPathAndName);
            foreach (ReportService2010.DataSource itemDataSource in itemDataSources)
            {
                ReportService2010.ItemReference itemRef = new ReportService2010.ItemReference();
                itemRef.Name = itemDataSource.Name;
                //example of a DataSource reference, i.e. 'itemRef.Reference': "/DataSources/SharedDataSource_DB2_CRM"
                itemRef.Reference = sPathAndFileNameForDataSource;
                itemRefs.Add(itemRef);
            }
            rs.SetItemReferences(reportPathAndName, itemRefs.ToArray());
        }
    }
}
To call it, I use this in the 'Main' method:
GetListOfObjectsInGivenFolder_and_ResetTheReportDataSource("0_Contacts");
In this case "0_Contacts" is the parent folder, itself located in the root directory, that contains all the reports for which I want to reset their DataSources to the new Shared DataSource. Then that Method calls the other method "ResetTheDataSource_for_a_Report" which actually sets the DataSource for the report.
I have researched how to export BLOBs to images. A database has an IMAGE column storing several thousand images. I thought of exporting the table, but I get a BLOB file error in EMS SQL Manager for InterBase and Firebird.
There have been good posts, but I have still not been able to succeed.
SQL scripts to insert File to BLOB field and export BLOB to File
This example has appeared on numerous pages, including Microsoft's site. I am using InterBase (Firebird). I have not found anything related to enabling xp_shell for Firebird, or for EMS SQL Manager for InterBase and Firebird (which I have also installed). My guess is: it's not possible. I also tried installing SQL Server Express, SQL Server 2008, and SQL Server 2012. I am at a dead end without having even connected to the server, because I have not managed to start it. I followed the guide at TechNet (How to: Start SQL Server Agent), but there are no services in the right pane for me.
PHP file to download an entire column (I may not post the link due to the reputation limit).
It has a MySQL connect section that daunts me. The DB is on my computer as a GDB file, and I also have XAMPP, so I can figure out a way to use this as a localhost environment. I hope this is making sense.
The last solution is to use bcp, an idea posted on Stack Overflow under the title: fastest way to export blobs from table into individual files. I read the documentation, installed it, but cannot connect to the server. I use -S PC-PC -U xxx -P xxx (the server must be wrong), but the information I find all uses -T (Windows Authentication).
Summing up: I am using Firebird, with EMS SQL Manager. I am trying to extract all images from the images table into individual files. Both tools have SQL script screens, but they appear to work only in conjunction with xp shell. What would you suggest? Am I using the wrong SQL manager to accomplish this?
There are several ways:
Use the isql command BLOBDUMP to write a blob to file,
use a client library (e.g. Jaybird for Java, the Firebird .NET provider for C#) to retrieve the data, or
with PHP, use ibase_blob_get in a loop to get bytes from the blob and write those to a file.
I don't use or know EMS SQL Manager, so I don't know if (and how) you can export a blob with it.
The example you link to, and almost all the tools you mention, are for Microsoft SQL Server, not for Firebird; so it is no wonder they don't work.
Example in Java
A basic example to save blobs to disk using Java 8 (might also work on Java 7) would be:
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

/**
 * Example to save images to disk from a Firebird database.
 * <p>
 * Code assumes a table with the following structure:
 * <pre>
 * CREATE TABLE imagestorage (
 *     filename VARCHAR(255),
 *     filedata BLOB SUB_TYPE BINARY
 * );
 * </pre>
 * </p>
 */
public class StoreImages {
    // Replace testdatabase with alias or path of database
    private static final String URL = "jdbc:firebirdsql://localhost/testdatabase?charSet=utf-8";
    private static final String USER = "sysdba";
    private static final String PASSWORD = "masterkey";
    private static final String DEFAULT_FOLDER = "D:\\Temp\\target";

    private final Path targetFolder;

    public StoreImages(String targetFolder) {
        this.targetFolder = Paths.get(targetFolder);
    }

    public static void main(String[] args) throws IOException, SQLException {
        final String targetFolder = args.length == 0 ? DEFAULT_FOLDER : args[0];
        final StoreImages storeImages = new StoreImages(targetFolder);
        storeImages.store();
    }

    private void store() throws IOException, SQLException {
        if (!Files.isDirectory(targetFolder)) {
            throw new FileNotFoundException(String.format("The folder %s does not exist", targetFolder));
        }
        try (
            Connection connection = DriverManager.getConnection(URL, USER, PASSWORD);
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery("SELECT filename, filedata FROM imagestorage")
        ) {
            while (rs.next()) {
                final Path targetFile = targetFolder.resolve(rs.getString("FILENAME"));
                if (Files.exists(targetFile)) {
                    System.out.printf("File %s already exists%n", targetFile);
                    continue;
                }
                try (InputStream data = rs.getBinaryStream("FILEDATA")) {
                    Files.copy(data, targetFile);
                }
            }
        }
    }
}
Example in C#
Below is an example in C#; it is similar to the code above.
using System;
using System.IO;
using FirebirdSql.Data.FirebirdClient;

class StoreImages
{
    private const string DEFAULT_FOLDER = @"D:\Temp\target";
    private const string DATABASE = @"D:\Data\db\fb3\fb3testdatabase.fdb";
    private const string USER = "sysdba";
    private const string PASSWORD = "masterkey";

    private readonly string targetFolder;
    private readonly string connectionString;

    public StoreImages(string targetFolder)
    {
        this.targetFolder = targetFolder;
        connectionString = new FbConnectionStringBuilder
        {
            Database = DATABASE,
            UserID = USER,
            Password = PASSWORD
        }.ToString();
    }

    static void Main(string[] args)
    {
        string targetFolder = args.Length == 0 ? DEFAULT_FOLDER : args[0];
        var storeImages = new StoreImages(targetFolder);
        storeImages.store();
    }

    private void store()
    {
        if (!Directory.Exists(targetFolder))
        {
            throw new FileNotFoundException(string.Format("The folder {0} does not exist", targetFolder), targetFolder);
        }
        using (var connection = new FbConnection(connectionString))
        {
            connection.Open();
            using (var command = new FbCommand("SELECT filename, filedata FROM imagestorage", connection))
            using (var reader = command.ExecuteReader())
            {
                while (reader.Read())
                {
                    string targetFile = Path.Combine(targetFolder, reader["FILENAME"].ToString());
                    if (File.Exists(targetFile))
                    {
                        Console.WriteLine("File {0} already exists", targetFile);
                        continue;
                    }
                    using (var fs = new FileStream(targetFile, FileMode.Create))
                    {
                        byte[] filedata = (byte[])reader["FILEDATA"];
                        fs.Write(filedata, 0, filedata.Length);
                    }
                }
            }
        }
    }
}
I have created a brand new Visual Studio 2012 Ultimate SP2 MVC4 project, but I am unable to get CSS class selector IntelliSense to work.
When I type <p class="m" ... I should get the class "myClass" appearing in the IntelliSense dropdown, but nothing happens.
The file I have listed below is: \Views\Shared\_Layout.cshtml
Any ideas?
Edit: I have re-installed VS2012 on a brand new Windows 7 system (running on Mac OS X Parallels 8) and it is still acting the same way. It also seems to be the same for MVC 3 projects.
Extensions installed:
Try adding the Web Essentials 2012 extension for Visual Studio 2012: http://visualstudiogallery.msdn.microsoft.com/07d54d12-7133-4e15-becb-6f451ea3bea6?SRC=VSIDE
And/or
try adding the Microsoft Web Developer Tools extension.
I have both of these and, using your same example, the IntelliSense works like a charm.
I tried all of the above-mentioned remedies and suggestions. None of them worked in my environment. According to Microsoft (under Microsoft Connect bug ID 781048), they have not implemented CSS class IntelliSense for MVC/Razor files, but they are working on including it in a future release.
I have a 10-minute webcast that walks through one solution for adding this IntelliSense to your VS2012 environment: a Visual Studio Intellisense Extension.
The webcast uses MEF to extend Visual Studio with an IntelliSense completion source that scans the currently loaded project for CSS class names and adds them as a completion set. Here is the CSS completion source class:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.ComponentModel.Composition;
using Microsoft.VisualStudio.Language.Intellisense;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Operations;
using Microsoft.VisualStudio.Utilities;
using EnvDTE;
using System.Text.RegularExpressions;
using System.Configuration;
using System.Collections.Specialized;

namespace CssClassIntellisense
{
    internal class cssClassList
    {
        public string cssFileName { get; set; } // Intellisense statement completion tab name
        public HashSet<string> cssClasses { get; set; }
    }

    internal class CssClassCompletionSource : ICompletionSource
    {
        private CssClassCompletionSourceProvider m_sourceProvider;
        private ITextBuffer m_textBuffer;
        private List<Completion> m_compList;
        private Project m_proj;
        private string m_pattern = @"(?<=\.)[A-Za-z0-9_-]+(?=\ {|{|,|\ )";
        private bool m_isDisposed;

        // constructor
        public CssClassCompletionSource(CssClassCompletionSourceProvider sourceProvider, ITextBuffer textBuffer, Project proj)
        {
            m_sourceProvider = sourceProvider;
            m_textBuffer = textBuffer;
            m_proj = proj;
        }

        public void AugmentCompletionSession(ICompletionSession session, IList<CompletionSet> completionSets)
        {
            ITextSnapshot snapshot = session.TextView.TextSnapshot;
            SnapshotPoint currentPoint = (SnapshotPoint)session.GetTriggerPoint(snapshot);

            if (TargetAttribute.Inside(currentPoint))
            {
                var hash = new List<cssClassList>();

                // read any .css project file to get a distinct list of class names
                if (m_proj != null)
                    foreach (ProjectItem _item in m_proj.ProjectItems)
                    {
                        getCssFiles(_item, hash);
                    }

                // scan the current editor's text buffer for any inline css class names
                cssClassList cssclasslist = ScanTextForCssClassName(
                    "Inline", snapshot.GetText());

                // if the buffer had any css class names, add them to the hash of files with css class names
                if (cssclasslist != null)
                    hash.Add(cssclasslist);

                var _tokenSpanAtPosition = FindTokenSpanAtPosition(session.GetTriggerPoint(m_textBuffer), session);

                foreach (cssClassList _cssClassList in hash)
                {
                    m_compList = new List<Completion>();

                    foreach (string str in _cssClassList.cssClasses.OrderBy(x => x)) // alphabetic sort
                        m_compList.Add(new Completion(str, str, str, null, null));

                    completionSets.Add(new CompletionSet(
                        _cssClassList.cssFileName, // the non-localized title of the tab
                        _cssClassList.cssFileName, // the display title of the tab
                        _tokenSpanAtPosition,
                        m_compList,
                        null));
                }
            }
        }

        private ITrackingSpan FindTokenSpanAtPosition(ITrackingPoint point, ICompletionSession session)
        {
            SnapshotPoint currentPoint = (session.TextView.Caret.Position.BufferPosition) - 1;
            ITextStructureNavigator navigator = m_sourceProvider.NavigatorService.GetTextStructureNavigator(m_textBuffer);
            TextExtent extent = navigator.GetExtentOfWord(currentPoint);
            return currentPoint.Snapshot.CreateTrackingSpan(extent.Span, SpanTrackingMode.EdgeInclusive);
        }

        private void getCssFiles(ProjectItem proj, List<cssClassList> hash)
        {
            foreach (ProjectItem _item in proj.ProjectItems)
            {
                if (_item.Name.EndsWith(".css") &&
                    !_item.Name.EndsWith(".min.css"))
                {
                    // scan the file's text contents for css class names
                    cssClassList cssclasslist = ScanTextForCssClassName(
                        _item.Name.Substring(0, _item.Name.IndexOf(".")),
                        System.IO.File.ReadAllText(_item.get_FileNames(1)) // EnvDTE FileNames is 1-based
                        );

                    // if the file had any css class names, add it to the hash of files with css class names
                    if (cssclasslist != null)
                        hash.Add(cssclasslist);
                }

                // recursively scan any subdirectory project files
                if (_item.ProjectItems.Count > 0)
                    getCssFiles(_item, hash);
            }
        }

        private cssClassList ScanTextForCssClassName(string FileName, string TextToScan)
        {
            Regex rEx = new Regex(m_pattern);
            MatchCollection matches = rEx.Matches(TextToScan);

            cssClassList cssclasslist = null;
            if (matches.Count > 0)
            {
                // create a css class file object to hold the list of css class names that exist in this file
                cssclasslist = new cssClassList();
                cssclasslist.cssFileName = FileName;
                cssclasslist.cssClasses = new HashSet<string>();
            }

            foreach (Match match in matches)
            {
                // create a unique list of css class names
                if (!cssclasslist.cssClasses.Contains(match.Value))
                    cssclasslist.cssClasses.Add(match.Value);
            }
            return cssclasslist;
        }

        public void Dispose()
        {
            if (!m_isDisposed)
            {
                GC.SuppressFinalize(this);
                m_isDisposed = true;
            }
        }
    }
}
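The source above also depends on a CssClassCompletionSourceProvider, which the webcast builds separately. For orientation, a minimal MEF export of such a provider might look like the sketch below; the content type here is an assumption, and resolving the owning EnvDTE.Project from the text buffer takes extra plumbing that is omitted:
using System.ComponentModel.Composition;
using EnvDTE;
using Microsoft.VisualStudio.Language.Intellisense;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Operations;
using Microsoft.VisualStudio.Utilities;

namespace CssClassIntellisense
{
    [Export(typeof(ICompletionSourceProvider))]
    [ContentType("html")] // assumption: hook the HTML editor; pick the content type your views actually use
    [Name("CssClassCompletionSource")]
    internal class CssClassCompletionSourceProvider : ICompletionSourceProvider
    {
        [Import]
        internal ITextStructureNavigatorSelectorService NavigatorService { get; set; }

        public ICompletionSource TryCreateCompletionSource(ITextBuffer textBuffer)
        {
            // Resolving the owning EnvDTE.Project from the buffer needs additional
            // plumbing (DTE + editor adapter services); omitted in this sketch.
            Project project = null;
            return new CssClassCompletionSource(this, textBuffer, project);
        }
    }
}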
As an FYI, you can also address this issue using ReSharper, but that is a third-party product that needs to be purchased for Visual Studio.
Is it just CSS IntelliSense that has failed, or has it stopped completely throughout Visual Studio?
I had a similar issue that affected the whole of my Visual Studio 2012. It was a while back, but I remember deleting a folder from my AppData. Take a look at this link; hopefully it will help:
http://www.haneycodes.net/visual-studio-2012-intellisense-not-working-solved/
You are not going to get IntelliSense for CSS in VS2012 for Razor views. There is a workaround, though: create one test view (.aspx) using the ASPX view engine and include your CSS file there. IntelliSense will then work in the new ASPX view. All you have to do is copy and paste the CSS class names from the ASPX view into the Razor view (.cshtml or .vbhtml). I hope this helps.
RavenDB throws an InvalidOperationException when IsOperationAllowedOnDocument is called in embedded mode.
I can see in the IsOperationAllowedOnDocument implementation a clause checking for calls in embedded mode:
namespace Raven.Client.Authorization
{
    public static class AuthorizationClientExtensions
    {
        public static OperationAllowedResult[] IsOperationAllowedOnDocument(this ISyncAdvancedSessionOperation session, string userId, string operation, params string[] documentIds)
        {
            var serverClient = session.DatabaseCommands as ServerClient;
            if (serverClient == null)
                throw new InvalidOperationException("Cannot get whatever operation is allowed on document in embedded mode.");
Is there a workaround for this other than not using embedded mode?
Thanks for your time.
I encountered the same situation while writing some unit tests. The solution James provided worked; however, it resulted in having one code path for the unit test and another for the production code, which defeated the purpose of the unit test. We were able to create a second document store and connect it to the first one, which allowed us to access the authorization extension methods successfully. While this solution would probably not be good for production code (because creating document stores is expensive), it works nicely for unit tests. Here is a code sample:
using (var documentStore = new EmbeddableDocumentStore
{
    RunInMemory = true,
    UseEmbeddedHttpServer = true,
    Configuration = { Port = EmbeddedModePort }
})
{
    documentStore.Initialize();
    var url = documentStore.Configuration.ServerUrl;
    using (var docStoreHttp = new DocumentStore { Url = url })
    {
        docStoreHttp.Initialize();
        using (var session = docStoreHttp.OpenSession())
        {
            // now you can run code like:
            // session.GetAuthorizationFor(),
            // session.SetAuthorizationFor(),
            // session.Advanced.IsOperationAllowedOnDocument(),
            // etc...
        }
    }
}
There are a couple of other items that should be mentioned:
The first document store needs to be run with UseEmbeddedHttpServer set to true so that the second one can access it.
I created a constant for the port so it would be used consistently and to ensure use of a non-reserved port.
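For reference, that constant is nothing more than something like this (the port value here is an arbitrary assumption; any free, non-reserved port will do):
// Arbitrary free port for the embedded HTTP server; pick any non-reserved port.
private const int EmbeddedModePort = 8079;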
I encountered this as well. Looking at the source, there's no way to do that operation as written. I am not sure if there's some intrinsic reason why, since I could easily replicate the functionality in my app by making an HTTP request directly for the same info:
HttpClient http = new HttpClient();
http.BaseAddress = new Uri("http://localhost:8080");
var url = new StringBuilder("/authorization/IsAllowed/")
    .Append(Uri.EscapeUriString(userid))
    .Append("?operation=")
    .Append(Uri.EscapeUriString(operation))
    .Append("&id=").Append(Uri.EscapeUriString(entityid));
http.GetStringAsync(url.ToString()).ContinueWith((response) =>
{
    var results = _session.Advanced.DocumentStore.Conventions.CreateSerializer()
        .Deserialize<OperationAllowedResult[]>(
            new RavenJTokenReader(RavenJToken.Parse(response.Result)));
}).Wait();
I would like to approach database versioning and automated upgrades in NHibernate from a different direction than most of the strategies proposed out there.
As each object is defined by an XML mapping, I would like to take the size and checksum of each mapping file/configuration and store them in a document database (Raven or something) along with a potential custom update script. If no script is found, use the NHibernate DDL generator to update the object schema. This way I can detect changes, and if I need to make DML changes in addition to DDL, or perform a carefully ordered transformation, I can theoretically do so in a controlled, testable manner. This should also maintain a certain level of persistence-layer agnosticism, although I'd imagine the scripts would still necessarily be database-system-specific.
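The fingerprinting half of this is straightforward. A minimal sketch, assuming the .hbm.xml mappings are embedded resources (the names here are hypothetical):
using System;
using System.IO;
using System.Reflection;
using System.Security.Cryptography;

// Hypothetical sketch: compute size + checksum for one embedded mapping resource,
// to be stored in the document database alongside any custom update script.
internal static class MappingFingerprint
{
    public static string ComputeSha1(Assembly mappingAssembly, string resourceName)
    {
        using (Stream stream = mappingAssembly.GetManifestResourceStream(resourceName))
        using (MemoryStream buffer = new MemoryStream())
        {
            stream.CopyTo(buffer);
            byte[] bytes = buffer.ToArray();
            using (SHA1 sha = SHA1.Create())
            {
                byte[] hash = sha.ComputeHash(bytes);
                // Store resourceName, bytes.Length, and this hex string; a change in
                // either value signals that the mapping (and maybe the schema) changed.
                return BitConverter.ToString(hash).Replace("-", "");
            }
        }
    }
}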
The trick would be generating the "old" mapping files from the database and comparing them to the current mapping files. I don't know if this is possible. I also don't know if I'm missing anything else that would make this strategy prohibitively impractical.
My question, then: how practical is this strategy, and why?
What I did to solve just that problem:
1. Version the database in a table called SchemaVersion.
2. Query the table to see if the schema is up to date (the required version is stored in the DAL); if yes, go to 6.
3. Get the update script with version == versionFromDb from resources/webservices/...
4. Run the script, which also updates SchemaVersion to the new version.
5. Go to 2.
6. Run the app.
To generate the scripts I have used two options:
support one RDBMS: run SchemaUpdate to export into a file and add DML statements manually
support multiple RDBMSs: use the NHibernate Table class to generate DDL at runtime to add/alter/delete tables, plus code which uses a session for DML
Update: regarding "what method did you use to store the current version", here is a small example, something like this:
public static class Constants
{
    public static readonly Version DatabaseSchemaVersion = new Version(1, 2, 3, 4);
}

public class DBMigration
{
    private IDictionary<Version, Action> _updates = new Dictionary<Version, Action>();
    private Configuration _config;
    private Dialect _dialect;
    private IList<Action<ISession>> _actions = new List<Action<ISession>>(16);
    private string _defaultCatalog;
    private string _defaultSchema;

    private void CreateTable(string name, Action<Table> configuretable)
    {
        var table = new Table(name);
        configuretable(table);
        string createTable = table.SqlCreateString(_dialect, _config.BuildMapping(), _defaultCatalog, _defaultSchema);
        _actions.Add(session => session.CreateSQLQuery(createTable).ExecuteUpdate());
    }

    private void UpdateVersionTo(Version version)
    {
        _actions.Add(session => { session.Get<SchemaVersion>(1).Value = version; session.Flush(); });
    }

    private void WithSession(Action<ISession> action)
    {
        _actions.Add(action);
    }

    public void Execute(Configuration config)
    {
        _actions.Clear();
        _defaultCatalog = config.Properties[NH.Environment.DefaultCatalog];
        _defaultSchema = config.Properties[NH.Environment.DefaultSchema];
        _config = config;
        _dialect = Dialect.GetDialect(config.Properties);

        using (var sf = _config.BuildSessionFactory())
        using (var session = sf.OpenSession())
        using (var tx = session.BeginTransaction())
        {
            Version dbVersion = session.Get<SchemaVersion>(1).Value;
            while (dbVersion < Constants.DatabaseSchemaVersion)
            {
                _actions.Clear();
                _updates[dbVersion].Invoke(); // init migration, TODO: error handling
                foreach (var action in _actions)
                {
                    action.Invoke(session);
                }
                tx.Commit();
                session.Clear();
                dbVersion = session.Get<SchemaVersion>(1).Value;
            }
        }
    }

    public DBMigration()
    {
        _updates.Add(new Version(1, 0, 0, 0), UpdateFromVersion1);
        _updates.Add(new Version(1, 0, 1, 0), UpdateFromVersion1);
        ...
    }

    private void UpdateFromVersion1()
    {
        CreateTable("Users", table => table.AddColumn(...));
        WithSession(session => session.CreateSQLQuery("INSERT INTO ..."));
        UpdateVersionTo(new Version(1, 0, 1, 0));
    }

    ...
}