Creating a database using the Firebird API is failing

This is my source code sample, inspired by the code samples that are part of the Firebird installation directory (C:\Program Files\Firebird\Firebird_2_5\examples):
AnsiString db_path = MainForm->exeFilePath + "\\results.fdb"; // C:\Projects\DBX\bin\results.fdb
if (FileExists(db_path))
{
    return;
}
HMODULE dll = LoadLibrary(L"fbclient.dll");
if (dll == NULL)
{
    throw Exception(L"Failed to load Firebird client library FBCLIENT.DLL!\r\nApplication will exit now.");
}
Pisc_dsql_execute_immediate pisc_dsql_execute_immediate = (Pisc_dsql_execute_immediate)GetProcAddress(dll, "isc_dsql_execute_immediate");
if (pisc_dsql_execute_immediate == NULL)
{
    throw Exception(L"Failed to load 'isc_dsql_execute_immediate' entry point in FBCLIENT.DLL. It seems like Firebird installation is corrupted. "
        "Contact system administrator.\r\nApplication will exit now.");
}
Pisc_commit_transaction pisc_commit_transaction = (Pisc_commit_transaction)GetProcAddress(dll, "isc_commit_transaction");
if (pisc_commit_transaction == NULL)
{
    throw Exception(L"Failed to load 'isc_commit_transaction' entry point in FBCLIENT.DLL. It seems like Firebird installation is corrupted. "
        "Contact system administrator.\r\nApplication will exit now.");
}
Pisc_detach_database pisc_detach_database = (Pisc_detach_database)GetProcAddress(dll, "isc_detach_database");
if (pisc_detach_database == NULL)
{
    throw Exception(L"Failed to load 'isc_detach_database' entry point in FBCLIENT.DLL. It seems like Firebird installation is corrupted. "
        "Contact system administrator.\r\nApplication will exit now.");
}
Pisc_sqlcode pisc_sqlcode = (Pisc_sqlcode)GetProcAddress(dll, "isc_sqlcode");
if (pisc_sqlcode == NULL)
{
    throw Exception(L"Failed to load 'isc_sqlcode' entry point in FBCLIENT.DLL. It seems like Firebird installation is corrupted. "
        "Contact system administrator.\r\nApplication will exit now.");
}
ISC_STATUS_ARRAY status; /* status vector */
isc_db_handle newdb = NULL; /* database handle */
isc_tr_handle trans = NULL; /* transaction handle */
AnsiString create_db;
create_db.printf("CREATE DATABASE '%s'", db_path.c_str());
long sqlcode;
UnicodeString msg;
if (pisc_dsql_execute_immediate(status, &newdb, &trans, 0, create_db.c_str(), 1, NULL))
{
    // ERROR OCCURS HERE
    sqlcode = pisc_sqlcode(status);
    msg.printf(L"Operation 'CREATE DATABASE' failed with sqlcode %ld.\r\nApplication will exit now.", sqlcode);
    throw Exception(msg.c_str());
}
pisc_commit_transaction(status, &trans);
pisc_detach_database(status, &newdb);
FreeLibrary(dll);
dll = NULL;
Database creation is failing with SQL code -902. The directory 'C:\Projects\DBX\bin\' exists and doesn't contain a file named 'results.fdb'. What's the reason for this error and how can I avoid it? Is there a detailed description of the Firebird API somewhere?

What I forgot were the user credentials. See the updated code sample (excerpt) below. (As for API documentation: the isc_* entry points are documented in the InterBase 6.0 API Guide, which still applies to Firebird 2.5.)
ISC_STATUS_ARRAY status; /* status vector */
isc_db_handle newdb = NULL; /* database handle */
isc_tr_handle trans = NULL; /* transaction handle */
AnsiString create_db;
create_db.printf("CREATE DATABASE '%s' USER '%S' PASSWORD '%S'", db_path.c_str(), MainForm->currentUser.name, MainForm->currentUser.password);
long sqlcode;
UnicodeString msg;
if (pisc_dsql_execute_immediate(status, &newdb, &trans, 0, create_db.c_str(), 1, NULL))
{
    sqlcode = pisc_sqlcode(status);
    msg.printf(L"Operation 'CREATE DATABASE' failed with sqlcode %ld.\r\nApplication will exit now.", sqlcode);
    throw Exception(msg.c_str());
}
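If the bare sqlcode is not enough to diagnose such failures, the status vector can be rendered as readable text with fb_interpret, the replacement for the deprecated isc_interprete. A minimal sketch, assuming fb_interpret is resolved from fbclient.dll the same way as the other entry points (the typedef name here is mine):

typedef ISC_LONG (ISC_EXPORT *Pfb_interpret)(ISC_SCHAR*, unsigned int, const ISC_STATUS**);
Pfb_interpret pfb_interpret = (Pfb_interpret)GetProcAddress(dll, "fb_interpret");

if (pisc_dsql_execute_immediate(status, &newdb, &trans, 0, create_db.c_str(), 1, NULL))
{
    char buf[512];
    AnsiString text;
    const ISC_STATUS *pvector = status; // fb_interpret advances this pointer
    while (pfb_interpret != NULL && pfb_interpret(buf, sizeof(buf), &pvector))
    {
        text += buf;    // one line of the error message per call
        text += "\r\n";
    }
    throw Exception(text);
}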

Related

mapreduce job on yarn exited with exitCode: -1000 because of resource changed on src filesystem

Application application_1552978163044_0016 failed 5 times due to AM Container for appattempt_1552978163044_0016_000005 exited with exitCode: -1000
Diagnostics:
java.io.IOException: Resource
abfs://xxx@xxx.dfs.core.windows.net/hdp/apps/2.6.5.3006-29/mapreduce/mapreduce.tar.gz
changed on src filesystem (expected 1552949440000, was 1552978240000
Failing this attempt. Failing the application.
Just based on the exception information, it seems to be caused by Azure Storage not keeping the original timestamp of the copied file. I found a workaround which recommends changing the source code of yarn-common to disable the block of code that checks the timestamp when copying a file, so the exception is not thrown and the MR job can continue to work.
Here is the source code in the latest version of yarn-common that checks the timestamp of the copied file and throws the exception:
/**
 * Localize files.
 * @param destination destination directory
 * @throws IOException cannot read or write file
 * @throws YarnException subcommand returned an error
 */
private void verifyAndCopy(Path destination)
    throws IOException, YarnException {
  final Path sCopy;
  try {
    sCopy = resource.getResource().toPath();
  } catch (URISyntaxException e) {
    throw new IOException("Invalid resource", e);
  }
  FileSystem sourceFs = sCopy.getFileSystem(conf);
  FileStatus sStat = sourceFs.getFileStatus(sCopy);
  if (sStat.getModificationTime() != resource.getTimestamp()) {
    throw new IOException("Resource " + sCopy +
        " changed on src filesystem (expected " + resource.getTimestamp() +
        ", was " + sStat.getModificationTime());
  }
  if (resource.getVisibility() == LocalResourceVisibility.PUBLIC) {
    if (!isPublic(sourceFs, sCopy, sStat, statCache)) {
      throw new IOException("Resource " + sCopy +
          " is not publicly accessible and as such cannot be part of the" +
          " public cache.");
    }
  }
  downloadAndUnpack(sCopy, destination);
}
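Following that recommendation, a minimal sketch of such a local patch (my illustration of the described workaround, assuming the class's existing LOG logger; this means rebuilding yarn-common locally, it is not an upstream fix) replaces the throw with a warning so localization continues:

// Workaround sketch: warn instead of throwing, so a timestamp mismatch
// (e.g. Azure Storage not preserving the original mtime) no longer fails
// container localization.
if (sStat.getModificationTime() != resource.getTimestamp()) {
  LOG.warn("Resource " + sCopy + " changed on src filesystem (expected "
      + resource.getTimestamp() + ", was " + sStat.getModificationTime()
      + "); timestamp check disabled, continuing");
}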

Editor.GetEntity does not wait for user input (click)

I have two dwg files: PID.dwg & 3D.dwg
The use case is to run a function on PID.dwg and then on 3D.dwg, specifically in that order.
The commands used in SendCommand below are from a separate DLL file that I load using NETLOAD prior to this function's execution.
Dim app As AcadApplication = CType(Application.AcadApplication, AcadApplication)
' Ctype( Autodesk.AutoCAD.ApplicationServices.Application.AcadApplication,
' Autodesk.AutoCAD.Interop.AcadApplication )
If isPidAnd3dOpened() Then
    ' Activate PID document
    app.ActiveDocument = acDocPid
    'acDocPid.Activate()
    acDocPid.SendCommand("DOSOMETHINGONPID" & vbCrLf)
    ' Activate 3D document
    app.ActiveDocument = acDoc3d
    'acDoc3d.Activate()
    acDoc3d.SendCommand("DOSOMETHINGON3D" & vbCrLf)
End If
The function of "DOSOMETINGON3D" requires and input from the user using Editor.GetEntity.
However, when acDoc3d.SendCommand("DOSOMETHINGON3D" & vbCrLf) is executed, it does not pause to wait for user input.
What am I missing?
You probably have to wait until the DOSOMETHINGONPID command has finished.
In ARX it would be something like this:
CString activeCMDName = _T("DOSOMETHINGONPID");
bool EOL = false;
while (!EOL)
{
    CString cmds = Variable::Get(_T("CMDNAMES"));
    if (cmds.Find(activeCMDName) >= 0) {
        Command::Wait();
    }
    else {
        EOL = true;
    }
}
where
CString Variable::Get(CString name)
{
    CString OutVal;
    resbuf rb;
    acedGetVar(name, &rb);
    OutVal.SetString(rb.resval.rstring);
    acutDelString(rb.resval.rstring);
    return OutVal;
}

void Command::Wait()
{
    ResBuf rb;
    rb.Add(RTSTR, _T("\\"));
    int ret = acedCmd(rb.GetFirst());
}
Sorry, I don't have production .NET code for this, but the idea translates directly.
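A rough, untested .NET translation of the same polling idea might look like this (assuming app and acDocPid from the question; GetVariable is the COM AcadDocument method):

' Untested sketch: poll the CMDNAMES system variable on the PID document
' and move on only once DOSOMETHINGONPID is no longer active.
While CType(acDocPid.GetVariable("CMDNAMES"), String).ToUpperInvariant().Contains("DOSOMETHINGONPID")
    Threading.Thread.Sleep(250) ' give AutoCAD time to finish the command
End While
app.ActiveDocument = acDoc3d
acDoc3d.SendCommand("DOSOMETHINGON3D" & vbCrLf)

Note that if this runs in-process inside AutoCAD, a blocking loop like this can hang the application; the event-based approach in the next answer is more robust.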
The first answer is correct: SendCommand cannot handle asynchronous commands. Here is a suggested solution in .NET:
//Create AutoCAD instance, then...
acadApp.ActiveDocument.SendCommand("(command \"NETLOAD\""+#"""C:\\acad\\networkdll\\SecondAssembly.dll"") ");
acadApp.ActiveDocument.SendCommand("#MYCOMMAND 0 ");
//Register EndCommand handler.
_DAcadApplicationEvents_EndCommandEventHandler handler = new
_DAcadApplicationEvents_EndCommandEventHandler(CommandEnded);
acadApp.EndCommand += handler;
waitHandle = new EventWaitHandle(false, EventResetMode.ManualReset);
waitHandle.WaitOne();
acadApp.EndCommand -= handler;
//Close the startup drawing (this requires waiting # SendCommand) because
//Drawing will cause a COMException otherwise. 'Drawing is busy'
//Mostly likely since the ActiceDocument is the startup drawing.
Event Handler:
public void CommandEnded(string globalCommandName)
{
    System.Windows.MessageBox.Show(globalCommandName + " just ended.");
    waitHandle.Set();
}

'failed to create commit: current tip is not the first parent' error during commit in libgit2

I am using the libgit2 v0.23.0 library for git pull and commit operations. I call git_merge(repo, their_heads, 1, &merge_opt, &checkout_opts), and it works fine: it merges the changes from the remote repository into the local repository. But afterwards, when I call git_commit_create(), it throws the error 'failed to create commit: current tip is not the first parent' with error code -15.
I investigated and found that the FETCH_HEAD and MERGE_HEAD files contain the updated oid, but ORIG_HEAD still contains the previous/outdated oid. I am not sure whether this is the cause of the error I am getting from git_commit_create().
int fetch()
{
qDebug()<<"Fetch";
git_remote *remote = NULL;
const git_transfer_progress *stats;
struct dl_data data;
git_fetch_options fetch_opts = GIT_FETCH_OPTIONS_INIT;
git_repository *repo = NULL;
QString repoPath = "repopath/.git";
int error = git_repository_open(&repo, repoPath.toStdString().c_str());
if (git_remote_lookup(&remote, repo, "origin") < 0) {
if (git_remote_create_anonymous(&remote, repo,"repoURL") < 0)
return -1;
}
fetch_opts.callbacks.update_tips = &update_cb;
fetch_opts.callbacks.sideband_progress = &progress_cb;
fetch_opts.callbacks.credentials = cred_acquire_cb;
data.remote = remote;
data.fetch_opts = &fetch_opts;
data.ret = 0;
data.finished = 0;
stats = git_remote_stats(remote);
download(&data);
if (stats->local_objects > 0) {
printf("\rReceived %d/%d objects in % bytes (used %d local objects)\n",
stats->indexed_objects, stats->total_objects, stats->received_bytes, stats->local_objects);
} else{
printf("\rReceived %d/%d objects in %bytes\n",
stats->indexed_objects, stats->total_objects, stats->received_bytes);
}
git_remote_disconnect(remote);
if (git_remote_update_tips(remote, &fetch_opts.callbacks, 1, fetch_opts.download_tags, NULL) < 0)
return -1;
const git_remote_head **head = NULL;
size_t size = 0;
(git_remote_ls(&head, &size, remote));
git_oid oid = head[0]->oid;
char * commit_id1 = new char[41]; //Commit ID
qDebug()<<"oid:"<<git_oid_tostr(commit_id1, 41, &oid);
git_annotated_commit *anno_out ;
git_annotated_commit_lookup(&anno_out,repo,&oid);
git_checkout_options checkout_opts = GIT_CHECKOUT_OPTIONS_INIT;
checkout_opts.checkout_strategy = GIT_CHECKOUT_FORCE;
const git_annotated_commit **their_heads = const_cast<const git_annotated_commit**>(&anno_out);
git_merge_options merge_opt = GIT_MERGE_OPTIONS_INIT;
merge_opt.file_favor = GIT_MERGE_FILE_FAVOR_UNION;
error = git_merge(repo,their_heads,1,&merge_opt,&checkout_opts);
if(error!=0){
//Error handling
}
else{
qDebug()<<"Merge successfully";
}
git_repository_state_cleanup(repo);
/* Create signature */
git_signature *me = NULL;
(git_signature_now(&me, "username", "username@gmail.com"));
//Tree Lookup
git_tree *tree;
git_object *tree_obj;
(git_revparse_single(&tree_obj, repo, "HEAD^{tree}"));
// Get parent commit
git_oid parentCommitId;
git_commit *parent;
git_oid remoteParentCommitId;
git_commit *remoteParent;
int nparents;
int err;
(git_reference_name_to_id( &parentCommitId, repo, "ORIG_HEAD" ));
(git_commit_lookup( &parent, repo, &parentCommitId ));
(git_reference_name_to_id( &remoteParentCommitId, repo, "MERGE_HEAD" ));
(git_commit_lookup( &remoteParent, repo, &remoteParentCommitId ));
const git_commit *parents [1] = {remoteParent };
git_oid new_commit_id;
err = (git_commit_create(
&new_commit_id,
repo,
"HEAD", /* name of ref to update */
me, /* author */
me, /* committer */
"UTF-8", /* message encoding */
"pull fetch", /* message */
(git_tree *) tree_obj, /* root tree */
1, /* parent count */
parents)); /* parents */
if(err !=0){
//I am getting error here
}
git_remote_free(remote);
return 0;
}
Please suggest what I have to do in order to resolve this issue.
Generally, you're seeing this error because you are building a new commit whose first parent is not the current tip of the branch. Indeed, you're building a new commit whose only parent is the remote commit, not the local one.
There are a few problems:
Error checking on all the functions is recommended. Several calls are likely failing, but their return values are wrapped in bare parentheses and never checked.
Don't call git_repository_state_cleanup in the middle of your operation. It aborts the merge and cleans up the state files that you're trying to read later, like MERGE_HEAD.
You're doing a merge, so you should give the new commit two parents (the two commits you're merging). Pass { parent, remoteParent } as the parents, with the local tip first.
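Putting those points together, a minimal sketch of the corrected commit step might look like this (v0.23-era API: giterr_last for diagnostics; repo, me and tree_obj as in the question; a hypothetical check helper stands in for real error handling):

// Report-style error check: every libgit2 call returns an int.
static int check(int error, const char *action)
{
    if (error < 0) {
        const git_error *e = giterr_last(); /* v0.23 API */
        fprintf(stderr, "%s failed: %s\n", action, e ? e->message : "unknown");
    }
    return error;
}

// Replaces the tail of fetch(): commit the merge with both parents,
// HEAD first, and clean up the merge state only afterwards.
git_oid head_id, merge_id, new_commit_id;
git_commit *local_tip = NULL, *merged = NULL;

// First parent: the current local tip (HEAD), not ORIG_HEAD.
check(git_reference_name_to_id(&head_id, repo, "HEAD"), "resolve HEAD");
check(git_commit_lookup(&local_tip, repo, &head_id), "lookup HEAD commit");

// Second parent: the commit being merged in (MERGE_HEAD).
check(git_reference_name_to_id(&merge_id, repo, "MERGE_HEAD"), "resolve MERGE_HEAD");
check(git_commit_lookup(&merged, repo, &merge_id), "lookup MERGE_HEAD commit");

const git_commit *parents[2] = { local_tip, merged };
check(git_commit_create(&new_commit_id, repo, "HEAD", me, me, "UTF-8",
                        "pull fetch", (git_tree *)tree_obj, 2, parents),
      "create merge commit");

// Remove MERGE_HEAD and friends only after the commit exists.
check(git_repository_state_cleanup(repo), "repository state cleanup");

git_commit_free(local_tip);
git_commit_free(merged);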

How to invoke the DMFStagingToSourceFileWriter class in the run method of a batch

I'm stuck on creating a batch job that has two tasks: one for getting data into the staging table and another for exporting the data from the staging table to a flat file.
The first task requires invoking the DMFStagingWriter class, and it works fine; my batch can populate data into the staging table. However, when I try to add code to complete the second task, invoked via the DMFStagingToSourceFileWriter class, I can't get it to work.
The part of my code starting with the comment "// Write from Staging table to file" is where I have difficulty writing the correct code to export data from the staging table to a file.
What am I missing?
Thank you.
public void run()
{
DMFDefinitionGroup definitionGroup; // processing group name
DMFDefinitionGroupEntity definitionGroupEntity;
DMFEntity dMFEntity,localEntity;
DMFExecutionId executionId; // job name
boolean execute;
DMFDefinitionGroupExecution definitionGroupExecution;
DMFStagingWriter stagingWriter;
SAUDMFVendPackingSlipTrans vendPackingSlipTrans;
SAUDMFVendPackingSlipTransDetails vendPackingSlipTransDetails;
DMFEntityType dmfEntityType;
DMFdefinationGroupName definationGroupName;
DMFDataSourceProperties dataSourceProperties;
DMFSourceName source;
FilenameSave filePath;
str headerRow;
DMFStagingToSourceFileWriter dmfStagingToSourceFileWriter;
select firstOnly definitionGroup
join definitionGroupEntity
where definitionGroup.DefinationGroupName == definitionGroupEntity.DefinitionGroup
join dMFEntity
where definitionGroupEntity.Entity == dMFEntity.EntityName
&& dMFEntity.EntityName == "sauVendPackingSlipTrans"
&& definitionGroup.DefinationGroupName == "VendPackingSlipTransAx";
select firstonly vendPackingSlipTransDetails
join vendPackingSlipTrans
where vendPackingSlipTransDetails.SAUDMFVendPackingSlipTrans == vendPackingSlipTrans.RecId ;
executionId = this.getExecutionMethod(definitionGroup.DefinationGroupName); // processing group name
// if no file set, then use repository mode
if(!fileName)
{
execute = DMFDefinitionGroupExecution::serviceInsertOrDisplay( definitionGroup,
executionId,
dMFEntity.EntityName,
'',
'',
vendPackingSlipTransDetails.ProcessingFolder,
vendPackingSlipTransDetails.CompletedFolder,
vendPackingSlipTransDetails.ErrorFolder,
NoYes::Yes,
DMFFileType::File,
1,
vendPackingSlipTransDetails.AccessFolder);
}
// else use file mode
else
{
execute = DMFDefinitionGroupExecution::insertOrDisplay( definitionGroup,
executionId,
'',
NoYes::No,
fileName);
}
definitionGroupExecution = DMFDefinitionGroupExecution::find(definitionGroup.DefinationGroupName,
dMFEntity.EntityName,
executionId);
if(execute)
{
ttsBegin;
definitionGroupExecution.selectForUpdate(true);
definitionGroupExecution.ExecuteTargetStep = NoYes::Yes;
definitionGroupExecution.Update();
ttsCommit;
//Write from ax to stagingWritting
stagingWriter = new DMFStagingWriter();
stagingWriter.parmDMFExecution(DMFExecution::find(executionId));
stagingWriter.parmcalledFrom(true);
stagingWriter.parmInBatch(this.isInBatch());
if(!fileName)
stagingWriter.sauParmRunOnService(true);
stagingWriter.run();
//Write from Staging table to file
definationGroupName = vendPackingSlipTransDetails.StaggingToFileDefGrp;
select firstOnly definitionGroupEntity
where definitionGroupEntity.DefinitionGroup == definationGroupName;
// check if definition group exists
if(!definitionGroupEntity.DefinitionGroup)
throw error(strFmt("Le groupe de traitement %1 n'existe pas", definationGroupName));
source = definitionGroupEntity.Source;//vendPackingSlipTrans_file
// if file name correctly set (mandatory)
if(vendPackingSlipTransDetails.Export_Prefix && vendPackingSlipTransDetails.Export_FileExtension)
{
//filePath = FolderPath + prefixfileName + grpAx + extension;
filePath = strFmt("%1%2_%3_%4", vendPackingSlipTransDetails.FolderPath,
vendPackingSlipTransDetails.Export_Prefix,
vendPackingSlipTransDetails.AXToStaggingDefGrp,
vendPackingSlipTransDetails.Export_FileExtension);
// set first line if required
if(vendPackingSlipTransDetails.ExportFileHeader)
headerRow = this.setHeaderRow(dataSourceProperties.FileColumnDelimiter, vendPackingSlipTransDetails.Export_Prefix);
}
else
throw error(strFmt("Le préfix doit être renseigné dans le paramétrage d'export de l'entité %1", entityType));
// Shared folder has to be set
if (!DMFParameters::find().SharedFolderPath)
throw error("#DMF1444");
// Shared folder has to be accessible
if (DMFParameters::find().ValidationStatus != NoYesError::Yes)
throw error("#DMF1415");
// export to file
dmfStagingToSourceFileWriter = DMFStagingToSourceFileWriter::construct();
dmfStagingToSourceFileWriter.parmsourceTarget(DMFSourceTarget::Source);
dmfStagingToSourceFileWriter.parmEntityName(definitionGroupExecution.Entity);
dmfStagingToSourceFileWriter.parmDefinitionGroupExecution(definitionGroupExecution);
dmfStagingToSourceFileWriter.sauParmDMFEntityType(DMFEntity::find(definitionGroupExecution.Entity).Type);
dmfStagingToSourceFileWriter.sauparmHeaderRow(headerRow);
dmfStagingToSourceFileWriter.saveLast();
dmfStagingToSourceFileWriter.sauVariablesSetter([definationGroupName,source,filePath]);
dmfStagingToSourceFileWriter.run();
//dmfStagingToSourceFileWriter.exportRun();
}
}

Efficient way to batch-import XMI files in Enterprise Architect

Our team are using Enterprise Architect version 10 and SVN for the repository.
Because the EAP file size is quite big (e.g. 80 MB), we export each package into a separate XMI file and store it in SVN. The EAP file itself is committed after some milestone. The problem is that to synchronize the EAP file with work from co-workers during development, we need to import lots of XMI files (e.g. 500 files in total).
I know that once the EAP file is updated, we can use Package Control -> Get All Latest. Therefore this problem occurs only during parallel development.
We have used keyboard shortcuts to do the import as follows:
1. Ctrl+Alt+I (Import package from XMI file)
2. Select the file name to import
3. Alt+I (Import)
4. Enter (Yes)
5. Repeat steps 2 to 4 until the module is finished
But still, importing hundreds of files is inefficient.
I've checked that Package Control has Batch Import/Export. Batch import/export works when I explicitly hard-code the XMI file name, but the options are not available when using version control (the batch import/export options are greyed out).
Are there any better ways to synchronize EAP and XMI files?
There is a scripting interface in EA. You might be able to automate the import using that. I've not used it, but it's probably quite good.
I'm not sure I fully understand your working environment, but I have some general points that may be of interest. It might be that if you use EA in a different way (especially my first point below), the need to batch import goes away.
Multiworker
First, multiple people can work on the same EAP file at the same time. The EAP file is nothing more than an Access database file, and EA uses locking to stop multiple people editing the same package at the same time. But you can comfortably have multiple people editing different packages in one EAP file simultaneously. Putting the EAP file on a file share somewhere is a good way of doing it.
Inbuilt Revision Control
Secondly, EA can interact directly with SVN (and other revision control systems); see the EA help on version control. In short, you can set up your EAP file so that individual packages (and everything below them) are SVN controlled. You can then check out an individual package, edit it, and check it back in. Or indeed you can check out the whole branch below a package (including sub-packages that are themselves SVN controlled).
Under the hood, EA is importing and exporting XMI files and checking them in and out of SVN, while the EAP file is always the head revision. Just like what you're doing by hand, but automated. It makes sense given that you can all use the one single EAP file. You do have to be a bit careful rolling back: links originating from objects in older versions of one package might be pointing at objects that no longer exist (but you can look at the import log errors to see if this is the case). It takes a bit of getting used to, but it works pretty well.
There's also the built-in package baselining functionality; that might be all you need anyway, and it works quite well, especially if you're all using the same EAP file.
Bigger Database Engine
Thirdly, you don't have to have an EAP file at all. The model's database can be in any suitable database system (MySQL, SQL Server, Oracle, etc.). That gives you all sorts of options for scaling up how it's used, what it's like over a WAN/the Internet, and so on.
In short, Sparx have been quite sensible about how EA can be used in a multi-worker environment, and it's worth exploiting that.
I have created an EA script using JScript to automate this.
Here is the script to do the export:
!INC Local Scripts.EAConstants-JScript
/*
* Script Name : Export List of SVN Packages
* Author : SDK
* Purpose : Export version-control information for a package and all of its
* subpackages. The exported file can then be used to automatically
* import the XMIs
* Date : 30 July 2013
* HOW TO USE : 1. Select the package that you would like to export in the Project Browser
* 2. Change the output filepath in this script if necessary.
* By default it is "D:\\EAOutput.txt"
* 3. Send the output file to your colleague who wanted to import the XMIs
*/
var f;
function main()
{
// UPDATE THE FOLLOWING OUTPUT FILE PATH IF NECESSARY
var filename = "D:\\EAOutput.txt";
var ForReading = 1, ForWriting = 2, ForAppending = 8;
Repository.EnsureOutputVisible( "Script" );
Repository.ClearOutput( "Script" );
Session.Output("Start generating output...please wait...");
var treeSelectedType = Repository.GetTreeSelectedItemType();
switch ( treeSelectedType )
{
case otPackage:
{
var fso = new ActiveXObject("Scripting.FileSystemObject");
f = fso.OpenTextFile(filename, ForWriting, true);
var selectedObject as EA.Package;
selectedObject = Repository.GetContextObject();
reportPackage(selectedObject);
loopChildPackages(selectedObject);
f.Close();
Session.Output( "Done! Check your output at " + filename);
break;
}
default:
{
Session.Prompt( "This script does not support items of this type.", promptOK );
}
}
}
function loopChildPackages(thePackage)
{
for (var j = 0 ; j < thePackage.Packages.Count; j++)
{
var child as EA.Package;
child = thePackage.Packages.GetAt(j);
reportPackage(child);
loopChildPackages(child);
}
}
function getParentPath(childPackage)
{
if (childPackage.ParentID != 0)
{
var parentPackage as EA.Package;
parentPackage = Repository.GetPackageByID(childPackage.ParentID);
return getParentPath(parentPackage) + "/" + parentPackage.Name;
}
return "";
}
function reportPackage(thePackage)
{
f.WriteLine("GUID=" + thePackage.PackageGUID + ";"
+ "NAME=" + thePackage.Name + ";"
+ "VCCFG=" + getVCCFG(thePackage) + ";"
+ "XML=" + thePackage.XMLPath + ";"
+ "PARENT=" + getParentPath(thePackage).substring(1) + ";"
);
}
function getVCCFG(thePackage)
{
if (thePackage.IsVersionControlled)
{
var array = new Array();
array = (thePackage.Flags).split(";");
for (var z = 0 ; z < array.length; z++)
{
var pos = array[z].indexOf('=');
if (pos > 0)
{
var key = array[z].substring(0, pos);
var value = array[z].substring(pos + 1);
if (key=="VCCFG")
{
return (value);
}
}
}
}
return "";
}
main();
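For reference, each line written by reportPackage has this shape (the values below are made-up placeholders):

GUID={4A8BBA11-1234-5678-9ABC-DEF012345678};NAME=MyPackage;VCCFG=mySvnConfig;XML=design\MyPackage.xml;PARENT=Model/Design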
And the script to do the import:
!INC Local Scripts.EAConstants-JScript
/*
* Script Name : Import List Of SVN Packages
* Author : SDK
* Purpose : Imports a package with all of its sub packages generated from
* "Export List Of SVN Packages" script
* Date : 01 Aug 2013
* HOW TO USE : 1. Get the output file generated by "Export List Of SVN Packages" script
* from your colleague
* 2. Get the XMIs in the SVN local copy
* 3. Change the path to the output file in this script if necessary (var filename).
* By default it is "D:\\EAOutput.txt"
* 4. Change the path to local SVN
* 5. Run the script
*/
var f;
var svnPath;
function main()
{
// CHANGE THE FOLLOWING TWO LINES ACCORDING TO YOUR INPUT AND LOCAL SVN COPY
var filename = "D:\\EAOutput.txt";
svnPath = "D:\\svn.xxx.com\\yyy\\docs\\design\\";
var ForReading = 1, ForWriting = 2, ForAppending = 8;
Repository.EnsureOutputVisible( "Script" );
Repository.ClearOutput( "Script" );
Session.Output("[INFO] Start importing packages from " + filename + ". Please wait...");
var fso = new ActiveXObject("Scripting.FileSystemObject");
f = fso.OpenTextFile(filename, ForReading);
// Read from the file and display the results.
while (!f.AtEndOfStream)
{
var r = f.ReadLine();
parseLine(r);
Session.Output("--------------------------------------------------------------------------------");
}
f.Close();
Session.Output("[INFO] Finished");
}
function parseLine(line)
{
Session.Output("[INFO] Parsing " + line);
var array = new Array();
array = (line).split(";");
var guid;
var name;
var isVersionControlled;
var xmlPath;
var parentPath;
isVersionControlled = false;
xmlPath = "";
for (var z = 0 ; z < array.length; z++)
{
var pos = array[z].indexOf('=');
if (pos > 0)
{
var key = array[z].substring(0, pos);
var value = array[z].substring(pos + 1);
if (key=="GUID") {
guid = value;
} else if (key=="NAME") {
name = value;
} else if (key=="VCCFG") {
if (value != "") {
isVersionControlled = true;
}
} else if (key=="XML") {
if (isVersionControlled) {
xmlPath = value;
}
} else if (key=="PARENT") {
parentPath = value;
}
}
}
// Quick check whether the target already exists, to speed up the process
var targetPackage as EA.Package;
targetPackage = Repository.GetPackageByGuid(guid);
if (targetPackage != null)
{
// target exists, do not do anything
Session.Output("[DEBUG] Target package \"" + name + "\" already exist");
return;
}
var paths = (parentPath).split("/");
var packages = new Array(paths.length);
for (var i = 0; i < paths.length; i++)
{
packages[i] = null;
}
if (paths.length < 2)
{
Session.Output("[INFO] Skipped root or level1");
return;
}
packages[0] = selectRoot(paths[0]);
packages[1] = selectPackage(packages[0], paths[1]);
if (packages[1] == null)
{
Session.Output("[ERROR] Cannot find " + paths[0] + "/" + paths[1] + "in Project Browser");
return;
}
for (var j = 2; j < paths.length; j++)
{
packages[j] = selectPackage(packages[j - 1], paths[j]);
if (packages[j] == null)
{
Session.Output("[DEBUG] Creating " + packages[j].Name);
// create the parent package
var parent as EA.Package;
parent = Repository.GetPackageByGuid(packages[j-1].PackageGUID);
packages[j] = parent.Packages.AddNew(paths[j], "");
packages[j].Update();
parent.Update();
parent.Packages.Refresh();
break;
}
}
// Check if name (package to import) already exist or not
var targetPackage = selectPackage(packages[paths.length - 1], name);
if (targetPackage == null)
{
if (xmlPath == "")
{
Session.Output("[DEBUG] Creating " + name);
// The package is not SVN controlled
var newPackage as EA.Package;
newPackage = packages[paths.length - 1].Packages.AddNew(name,"");
Session.Output("New GUID = " + newPackage.PackageGUID);
newPackage.Update();
packages[paths.length - 1].Update();
packages[paths.length - 1].Packages.Refresh();
}
else
{
// The package is SVN controlled; import its XMI
Session.Output("[DEBUG] Need to import: " + svnPath + xmlPath);
var project as EA.Project;
project = Repository.GetProjectInterface;
var result;
Session.Output("GUID = " + packages[paths.length - 1].PackageGUID);
Session.Output("GUID XML = " + project.GUIDtoXML(packages[paths.length - 1].PackageGUID));
Session.Output("XMI file = " + svnPath + xmlPath);
result = project.ImportPackageXMI(project.GUIDtoXML(packages[paths.length - 1].PackageGUID), svnPath + xmlPath, 1, 0);
Session.Output(result);
packages[paths.length - 1].Update();
packages[paths.length - 1].Packages.Refresh();
}
}
else
{
// target exists, do not do anything
Session.Output("[DEBUG] Target package \"" + name + "\" already exist");
}
}
function selectPackage(thePackage, childName)
{
var childPackage as EA.Package;
childPackage = null;
if (thePackage == null)
return null;
for (var i = 0; i < thePackage.Packages.Count; i++)
{
childPackage = thePackage.Packages.GetAt(i);
if (childPackage.Name == childName)
{
Session.Output("[DEBUG] Found " + childName);
return childPackage;
}
}
Session.Output("[DEBUG] Cannot find " + childName);
return null;
}
function selectRoot(rootName)
{
for (var y = 0; y < Repository.Models.Count; y++)
{
root = Repository.Models.GetAt(y);
if (root.Name == rootName)
{
return root;
}
}
return null;
}
main();