@@ -0,0 +1 @@
SELECT core.executeJavaUpgradeCode('addRowIdToProvisionedDataClassTables');
@@ -0,0 +1 @@
EXEC core.executeJavaUpgradeCode 'addRowIdToProvisionedDataClassTables';
147 changes: 144 additions & 3 deletions experiment/src/client/test/integration/DataClassCrud.ispec.ts
@@ -1,6 +1,6 @@
import {
ExperimentCRUDUtils,
generateFieldName,
generateDomainName,
getEscapedNameExpression,
hookServer,
RequestOptions,
@@ -12,6 +12,7 @@ import {
checkLackDesignerOrReaderPerm,
createSource,
deleteSourceType,
generateFieldNameForImport,
getDataClassRowIdByName,
initProject,
verifyRequiredLineageInsertUpdate,
@@ -518,7 +519,7 @@ describe('Multi Value Text Choice', () => {
});

const dataType = 'MVTCReq Source Type';
const fieldName = generateFieldName();
const fieldName = generateFieldNameForImport();
const fieldNameInExpression = getEscapedNameExpression((fieldName));
console.log("Selected Required MVTC dataclass name: " + dataType + ", field name: " + fieldName);

@@ -818,4 +819,144 @@ describe('Multi Value Text Choice', () => {

});

});
});

describe('Data CRUD', () => {

it ("Update using different key fields", async () => {
const dataType = generateDomainName(3) + "UpdateKeyFields";
const fieldName = generateFieldNameForImport();
console.log("Selected dataclass name: " + dataType + ", field name: " + fieldName);

// create data class with one field
await server.post('property', 'createDomain', {
kind: 'DataClass',
domainDesign: { name: dataType, fields: [{ name: fieldName }] },
options: { name: dataType }
}, { ...topFolderOptions, ...designerReaderOptions }).expect(successfulResponse);

// insert 2 rows of data, providing explicit names and setting rowId = 1 on both
const dataName1 = 'KeyData1';
const dataName2 = 'KeyData2';
const inserted = await insertDataClassData([
{ name: dataName1, description: 'original1', [fieldName]: 'val1', rowId: 1 },
{ name: dataName2, description: 'original2', [fieldName]: 'val2', rowId: 1 },
], dataType, topFolderOptions);

// verify both rows are inserted with the correct names and that neither row was assigned rowId 1; record the rowId and lsid of both rows
expect(inserted[0].name).toBe(dataName1);
expect(inserted[1].name).toBe(dataName2);
expect(inserted[0].rowId).not.toBe(1);
expect(inserted[1].rowId).not.toBe(1);
const row1RowId = inserted[0].rowId;
const row1Lsid = inserted[0].lsid;
const row2RowId = inserted[1].rowId;
const row2Lsid = inserted[1].lsid;

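// helper to find a returned row by its RowId (field lookup is case-insensitive)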
const findRow = (rows: any[], rowId: number) => rows.find(r => caseInsensitive(r, 'RowId') === rowId);

// update description and fieldName value for both rows using rowId as the key; verify the data is updated correctly
await ExperimentCRUDUtils.updateRows(server, [
{ rowId: row1RowId, description: 'updByRowId1', [fieldName]: 'rowIdVal1' },
{ rowId: row2RowId, description: 'updByRowId2', [fieldName]: 'rowIdVal2' },
], 'exp.data', dataType, topFolderOptions, editorUserOptions);

let rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions);
let row1 = findRow(rows, row1RowId);
let row2 = findRow(rows, row2RowId);
expect(caseInsensitive(row1, 'description')).toBe('updByRowId1');
expect(caseInsensitive(row1, fieldName)).toBe('rowIdVal1');
expect(caseInsensitive(row2, 'description')).toBe('updByRowId2');
expect(caseInsensitive(row2, fieldName)).toBe('rowIdVal2');

// update description and fieldName value for both rows using lsid as the key; verify the data is updated correctly
await ExperimentCRUDUtils.updateRows(server, [
{ lsid: row1Lsid, description: 'updByLsid1', [fieldName]: 'lsidVal1' },
{ lsid: row2Lsid, description: 'updByLsid2', [fieldName]: 'lsidVal2' },
], 'exp.data', dataType, topFolderOptions, editorUserOptions);

rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions);
row1 = findRow(rows, row1RowId);
row2 = findRow(rows, row2RowId);
expect(caseInsensitive(row1, 'description')).toBe('updByLsid1');
expect(caseInsensitive(row1, fieldName)).toBe('lsidVal1');
expect(caseInsensitive(row2, 'description')).toBe('updByLsid2');
expect(caseInsensitive(row2, fieldName)).toBe('lsidVal2');

// update description and fieldName value, using lsid as the key for one row and rowId for the other; verify the data is updated correctly
await ExperimentCRUDUtils.updateRows(server, [
{ lsid: row1Lsid, description: 'updMixed1', [fieldName]: 'mixedVal1' },
{ rowId: row2RowId, description: 'updMixed2', [fieldName]: 'mixedVal2' },
], 'exp.data', dataType, topFolderOptions, editorUserOptions);

rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions);
row1 = findRow(rows, row1RowId);
row2 = findRow(rows, row2RowId);
expect(caseInsensitive(row1, 'description')).toBe('updMixed1');
expect(caseInsensitive(row1, fieldName)).toBe('mixedVal1');
expect(caseInsensitive(row2, 'description')).toBe('updMixed2');
expect(caseInsensitive(row2, fieldName)).toBe('mixedVal2');

// update the names of both rows using lsid as the key; verify the names are updated correctly
const newName1 = 'RenamedByLsid1';
const newName2 = 'RenamedByLsid2';
await ExperimentCRUDUtils.updateRows(server, [
{ lsid: row1Lsid, name: newName1 },
{ lsid: row2Lsid, name: newName2 },
], 'exp.data', dataType, topFolderOptions, editorUserOptions);

rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, 'RowId,Name', topFolderOptions, adminOptions);
row1 = findRow(rows, row1RowId);
row2 = findRow(rows, row2RowId);
expect(caseInsensitive(row1, 'Name')).toBe(newName1);
expect(caseInsensitive(row2, 'Name')).toBe(newName2);

// update the names of both rows using rowId as the key; verify the names are updated correctly
const newName3 = 'RenamedByRowId1';
const newName4 = 'RenamedByRowId2';
await ExperimentCRUDUtils.updateRows(server, [
{ rowId: row1RowId, name: newName3 },
{ rowId: row2RowId, name: newName4 },
], 'exp.data', dataType, topFolderOptions, editorUserOptions);

rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, 'RowId,Name', topFolderOptions, adminOptions);
row1 = findRow(rows, row1RowId);
row2 = findRow(rows, row2RowId);
expect(caseInsensitive(row1, 'Name')).toBe(newName3);
expect(caseInsensitive(row2, 'Name')).toBe(newName4);

// update description and fieldName value via import with UPDATE; the import columns include the Name field; verify the data is updated correctly
const importUpdateText = 'Name\tDescription\t' + fieldName + '\n' + newName3 + '\timportUpd1\timportVal1\n' + newName4 + '\timportUpd2\timportVal2';
const updateResp = await ExperimentCRUDUtils.importData(server, importUpdateText, dataType, 'UPDATE', topFolderOptions, editorUserOptions);
expect(updateResp.text.indexOf('"success" : true') > -1).toBeTruthy();

rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions);
row1 = findRow(rows, row1RowId);
row2 = findRow(rows, row2RowId);
expect(caseInsensitive(row1, 'description')).toBe('importUpd1');
expect(caseInsensitive(row1, fieldName)).toBe('importVal1');
expect(caseInsensitive(row2, 'description')).toBe('importUpd2');
expect(caseInsensitive(row2, fieldName)).toBe('importVal2');

// update description and fieldName value via import with MERGE while also creating a new data row; the import columns include the Name field; verify both the update and the insert succeed
const newDataName = 'MergedNewData';
const importMergeText = 'Name\tDescription\t' + fieldName + '\n' + newName3 + '\tmergeUpd1\tmergeVal1\n' + newName4 + '\tmergeUpd2\tmergeVal2\n' + newDataName + '\tmergeNew\tmergeNewVal';
const mergeResp = await ExperimentCRUDUtils.importData(server, importMergeText, dataType, 'MERGE', topFolderOptions, editorUserOptions);
expect(mergeResp.text.indexOf('"success" : true') > -1).toBeTruthy();

rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions);
row1 = findRow(rows, row1RowId);
row2 = findRow(rows, row2RowId);
expect(caseInsensitive(row1, 'description')).toBe('mergeUpd1');
expect(caseInsensitive(row1, fieldName)).toBe('mergeVal1');
expect(caseInsensitive(row2, 'description')).toBe('mergeUpd2');
expect(caseInsensitive(row2, fieldName)).toBe('mergeVal2');

// verify the new data row was created by the merge
const newDataRow = await getDataClassDataByName(newDataName, dataType, '*', topFolderOptions, adminOptions);
expect(caseInsensitive(newDataRow, 'Name')).toBe(newDataName);
expect(caseInsensitive(newDataRow, 'description')).toBe('mergeNew');
expect(caseInsensitive(newDataRow, fieldName)).toBe('mergeNewVal');
});

});
14 changes: 14 additions & 0 deletions experiment/src/client/test/integration/utils.ts
@@ -1,5 +1,6 @@
import {
ExperimentCRUDUtils,
generateFieldName,
IntegrationTestServer,
RequestOptions,
selectRandomN,
@@ -903,3 +904,16 @@ export async function insertRowsExpectError(server: IntegrationTestServer, rows:
export async function updateRowsExpectError(server: IntegrationTestServer, rows: any[], schemaName: string, queryName: string, error: string, folderOptions: RequestOptions, userOptions: RequestOptions) {
return insertRowsExpectError(server, rows, schemaName, queryName, error, folderOptions, userOptions, true);
}

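// A field name is safe to use in tab-delimited import text only if it contains none of the
// characters that break TSV/CSV parsing: comma, tab, double quote, or newline.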
export function canNameBeUsedInImport(name: string) {
return name.indexOf(',') === -1 && name.indexOf('\t') === -1 && name.indexOf('"') === -1 && name.indexOf('\n') === -1;
}

export function generateFieldNameForImport(length: number = 10, charset?: string) {
let fieldName = generateFieldName(length, charset);
while (!canNameBeUsedInImport(fieldName)) {
// regenerate with the same length and charset until the name is import-safe
fieldName = generateFieldName(length, charset);
}
return fieldName;
}
2 changes: 1 addition & 1 deletion experiment/src/org/labkey/experiment/ExperimentModule.java
@@ -205,7 +205,7 @@ public String getName()
@Override
public Double getSchemaVersion()
{
return 26.002;
return 26.003;
}

@Nullable
145 changes: 145 additions & 0 deletions experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java
Expand Up @@ -30,6 +30,7 @@
import org.labkey.api.data.ContainerManager;
import org.labkey.api.data.DbSchema;
import org.labkey.api.data.DbScope;
import org.labkey.api.data.DeferredUpgrade;
import org.labkey.api.data.JdbcType;
import org.labkey.api.data.Parameter;
import org.labkey.api.data.ParameterMapStatement;
@@ -64,6 +65,9 @@
import org.labkey.api.security.roles.SiteAdminRole;
import org.labkey.api.settings.AppProps;
import org.labkey.api.util.logging.LogHelper;
import org.labkey.experiment.api.DataClass;
import org.labkey.experiment.api.DataClassDomainKind;
import org.labkey.experiment.api.ExpDataClassImpl;
import org.labkey.experiment.api.ExpSampleTypeImpl;
import org.labkey.experiment.api.ExperimentServiceImpl;
import org.labkey.experiment.api.MaterialSource;
@@ -74,6 +78,7 @@
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -509,4 +514,144 @@ public static void fixContainerForMovedSampleFiles(ModuleContext context)
}
}

/**
* Called from exp-26.002-26.003.sql
* Drop the classid column and add a rowId column to existing provisioned DataClass tables.
*/
@SuppressWarnings("unused")
@DeferredUpgrade
public static void addRowIdToProvisionedDataClassTables(ModuleContext context)
{
if (context.isNewInstall())
return;

try (DbScope.Transaction tx = ExperimentService.get().ensureTransaction())
{
TableInfo source = ExperimentServiceImpl.get().getTinfoDataClass();
new TableSelector(source, null, null).stream(DataClass.class)
.map(ExpDataClassImpl::new)
.forEach(ExperimentUpgradeCode::upgradeProvisionedDataClassTable);

tx.commit();
}
}

private static void upgradeProvisionedDataClassTable(ExpDataClassImpl dc)
{
Domain domain = dc.getDomain();
DataClassDomainKind kind = null;
try
{
kind = (DataClassDomainKind) domain.getDomainKind();
}
catch (IllegalArgumentException e)
{
// pass
}
if (null == kind || null == kind.getStorageSchemaName())
return;

DbSchema schema = DataClassDomainKind.getSchema();
DbScope scope = schema.getScope();

StorageProvisioner storageProvisioner = StorageProvisioner.get();

storageProvisioner.ensureStorageTable(domain, kind, scope);
domain = PropertyService.get().getDomain(domain.getTypeId());
assert (null != domain && null != domain.getStorageTableName());

SchemaTableInfo provisionedTable = schema.getTable(domain.getStorageTableName());
if (provisionedTable == null)
{
LOG.error("DataClass '" + dc.getName() + "' (" + dc.getRowId() + ") has no provisioned table.");
return;
}

// Drop classid column if present
String classIdColumnName = "classid";
ColumnInfo classIdColumn = provisionedTable.getColumn(FieldKey.fromParts(classIdColumnName));
if (classIdColumn != null)
{
Set<String> indicesToRemove = new HashSet<>();
for (var index : provisionedTable.getAllIndices())
{
if (index.columns().contains(classIdColumn))
indicesToRemove.add(index.name());
}

if (!indicesToRemove.isEmpty())
StorageProvisionerImpl.get().dropTableIndices(domain, indicesToRemove);

// Remanufacture a property descriptor that matches the original classid property descriptor.
var spec = new PropertyStorageSpec(classIdColumnName, JdbcType.INTEGER);
PropertyDescriptor pd = new PropertyDescriptor();
pd.setContainer(dc.getContainer());
pd.setDatabaseDefaultValue(spec.getDefaultValue());
pd.setName(spec.getName());
pd.setJdbcType(spec.getJdbcType(), spec.getSize());
pd.setNullable(spec.isNullable());
pd.setMvEnabled(spec.isMvEnabled());
pd.setPropertyURI(DomainUtil.createUniquePropertyURI(domain.getTypeURI(), null, new CaseInsensitiveHashSet()));
pd.setDescription(spec.getDescription());
pd.setImportAliases(spec.getImportAliases());
pd.setScale(spec.getSize());
DomainPropertyImpl dp = new DomainPropertyImpl((DomainImpl) domain, pd);

LOG.debug("Dropping classid column from table '{}' for data class '{}' in folder {}.", provisionedTable.getName(), dc.getName(), dc.getContainer().getPath());
StorageProvisionerImpl.get().dropProperties(domain, Set.of(dp));
}

// Add rowId column if not present
ColumnInfo rowIdCol = provisionedTable.getColumn("rowId");
if (rowIdCol != null)
{
LOG.info("DataClass '{}' ({}) already has 'rowId' column. Skipping.", dc.getName(), dc.getRowId());
return;
}

PropertyStorageSpec rowIdProp = new PropertyStorageSpec("rowId", JdbcType.INTEGER); // nullable for initial add
storageProvisioner.addStorageProperties(domain, Collections.singletonList(rowIdProp), true);
LOG.info("DataClass '{}' ({}) added 'rowId' column", dc.getName(), dc.getRowId());

// Populate rowId from exp.data using lsid join
fillRowId(dc, domain, scope);

// Set NOT NULL constraint
SqlExecutor executor = new SqlExecutor(scope);
boolean isPostgreSQL = scope.getSqlDialect().isPostgreSQL();
if (isPostgreSQL)
executor.execute(new SQLFragment("ALTER TABLE expdataclass.").append(domain.getStorageTableName()).append(" ALTER COLUMN rowId SET NOT NULL"));
else
executor.execute(new SQLFragment("ALTER TABLE expdataclass.").append(domain.getStorageTableName()).append(" ALTER COLUMN rowId INT NOT NULL"));

// Add indexes back via StorageProvisioner
storageProvisioner.ensureTableIndices(domain);
LOG.info("DataClass '{}' ({}) added unique index on 'rowId'", dc.getName(), dc.getRowId());

// Add FK constraint (no StorageProvisioner API for FKs on existing tables)
String fkName = "fk_rowid_" + domain.getStorageTableName() + "_data";
executor.execute(new SQLFragment("ALTER TABLE expdataclass.").append(domain.getStorageTableName())
.append(" ADD CONSTRAINT ").append(fkName)
.append(" FOREIGN KEY (rowId) REFERENCES exp.Data(RowId)"));
LOG.info("DataClass '{}' ({}) added FK constraint on 'rowId'", dc.getName(), dc.getRowId());
}

private static void fillRowId(ExpDataClassImpl dc, Domain domain, DbScope scope)
{
String tableName = domain.getStorageTableName();
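// Copy exp.data.RowId into the provisioned table's new rowId column by joining on lsid, scoped to this data class via its cpasType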
SQLFragment update = new SQLFragment()
.append("UPDATE expdataclass.").append(tableName).append("\n")
.append("SET rowId = i.rowid\n")

Copilot AI Feb 20, 2026

There's a case inconsistency in the column reference. Line 644 sets rowId = i.rowid (lowercase), but line 646 selects d.RowId (mixed case). This should be consistent. Use i.RowId on line 644 to match the column name used in the SELECT.

Suggested change
.append("SET rowId = i.rowid\n")
.append("SET rowId = i.RowId\n")

.append("FROM (\n")
.append(" SELECT d.lsid, d.RowId\n")
.append(" FROM exp.data d\n")
.append(" WHERE d.cpasType = ?\n").add(domain.getTypeURI())
.append(") AS i\n")
.append("WHERE i.lsid = ").append(tableName).append(".lsid");
Comment on lines +642 to +650

Copilot AI Feb 20, 2026

The UPDATE...FROM syntax used here is PostgreSQL-specific and will fail on SQL Server. SQL Server requires the table alias to be used in the UPDATE clause. The UPDATE statement should be database-specific. For SQL Server, use: UPDATE t SET t.rowId = i.rowid FROM expdataclass.tableName t INNER JOIN ..., while PostgreSQL uses: UPDATE expdataclass.tableName SET rowId = i.rowid FROM ... WHERE ....

Suggested change
SQLFragment update = new SQLFragment()
.append("UPDATE expdataclass.").append(tableName).append("\n")
.append("SET rowId = i.rowid\n")
.append("FROM (\n")
.append(" SELECT d.lsid, d.RowId\n")
.append(" FROM exp.data d\n")
.append(" WHERE d.cpasType = ?\n").add(domain.getTypeURI())
.append(") AS i\n")
.append("WHERE i.lsid = ").append(tableName).append(".lsid");
SQLFragment update = new SQLFragment();
if (scope.getSqlDialect().isSqlServer())
{
// SQL Server requires the table alias in the UPDATE clause
update.append("UPDATE t\n")
.append("SET t.rowId = i.RowId\n")
.append("FROM expdataclass.").append(tableName).append(" t\n")
.append("JOIN (\n")
.append(" SELECT d.lsid, d.RowId\n")
.append(" FROM exp.data d\n")
.append(" WHERE d.cpasType = ?\n").add(domain.getTypeURI())
.append(") AS i ON i.lsid = t.lsid");
}
else
{
// PostgreSQL-style UPDATE ... FROM syntax
update.append("UPDATE expdataclass.").append(tableName).append("\n")
.append("SET rowId = i.RowId\n")
.append("FROM (\n")
.append(" SELECT d.lsid, d.RowId\n")
.append(" FROM exp.data d\n")
.append(" WHERE d.cpasType = ?\n").add(domain.getTypeURI())
.append(") AS i\n")
.append("WHERE i.lsid = ").append(tableName).append(".lsid");
}


int count = new SqlExecutor(scope).execute(update);
LOG.info("DataClass '{}' ({}) populated 'rowId' column, count={}", dc.getName(), dc.getRowId(), count);
}


}