Hi @ab,
I need more fields in an inherited TSQLRecordHistory class:
  TSQLARISHistory = class(TSQLRecordHistory)
  protected
    fLogonName: RawUTF8;
    fTableName: RawUTF8;
    fRecID: TID;
  published
    property LogonName: RawUTF8 index 50 read fLogonName write fLogonName;
    property RecID: TID read fRecID write fRecID;
    property TableName: RawUTF8 index 50 read fTableName write fTableName;
  end;
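For context, this is roughly how I register the descendant and enable the tracking (simplified from my project; TSQLCustomer, StartTracking and the database file name are just placeholders):

uses
  mORMot, mORMotSQLite3, SynSQLite3Static;

procedure StartTracking;
var
  Model: TSQLModel;
  Server: TSQLRestServerDB;
begin
  // the history class must be part of the model, next to the tracked table(s)
  Model := TSQLModel.Create([TSQLCustomer, TSQLARISHistory]);
  Server := TSQLRestServerDB.Create(Model, 'data.db3');
  try
    Server.CreateMissingTables;
    // historize every change to TSQLCustomer into TSQLARISHistory rows
    Server.TrackChanges([TSQLCustomer], TSQLARISHistory);
    // ... run the application / expose the server as usual
  finally
    Server.Free;
    Model.Free;
  end;
end;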
and I have modified mORMot.pas in this way:
interface
...
type
...
  TSQLRestServer = class(TSQLRest)
  protected
    ...
    function MainEngineUpdateFieldIncrement(TableModelIndex: integer; ID: TID;
      const FieldName: RawUTF8; Increment: Int64): boolean; virtual; abstract;
    /// fields serialization for history change tracking
    // - Fields: name/value pairs to be JSON-serialized
    // - JSON: the resulting serialized JSON
    procedure DoHistoryFieldsSerialization(const Fields: array of const;
      var JSON: RawUTF8); virtual;
    ...
  end;
implementation
...
procedure TSQLRestServer.DoHistoryFieldsSerialization(const Fields: array of const;
  var JSON: RawUTF8);
begin
  JSON := JSONEncode(Fields);
end;
function TSQLRestServer.InternalUpdateEvent(aEvent: TSQLEvent; aTableIndex: integer;
  aID: TID; const aSentData: RawUTF8; aIsBlobFields: PSQLFieldBits): boolean;

  procedure DoTrackChanges;
  var TableHistoryIndex: integer;
      JSON: RawUTF8;
      Event: TSQLHistoryEvent;
  begin
    case aEvent of
    seAdd:    Event := heAdd;
    seUpdate: Event := heUpdate;
    seDelete: Event := heDelete;
    else exit;
    end;
    TableHistoryIndex := fTrackChangesHistoryTableIndex[aTableIndex];
    fAcquireExecution[execORMWrite].Safe.Lock; // avoid race condition
    try // low-level Add(TSQLRecordHistory) without cache - may be within a batch in another thread
      DoHistoryFieldsSerialization(['ModifiedRecord',aTableIndex+aID shl 6,'Event',ord(Event),
        'SentDataJSON',aSentData,'TimeStamp',ServerTimeStamp],JSON); // <<--- call new procedure
      EngineAdd(TableHistoryIndex,JSON);
      { TODO: use a BATCH (in background thread) to speed up TSQLHistory storage }
      if fTrackChangesHistory[TableHistoryIndex].CurrentRow>
         fTrackChangesHistory[TableHistoryIndex].MaxSentDataJsonRow then begin
        // gather & compress TSQLRecordHistory.SentDataJson into the History BLOB
        TrackChangesFlush(TSQLRecordHistoryClass(Model.Tables[TableHistoryIndex]));
        fTrackChangesHistory[TableHistoryIndex].CurrentRow := 0;
      end else
        // fast append as JSON until MaxSentDataJsonRow is reached
        inc(fTrackChangesHistory[TableHistoryIndex].CurrentRow);
    finally
      fAcquireExecution[execORMWrite].Safe.UnLock;
    end;
  end;

begin
  ...
end;
and I have implemented DoHistoryFieldsSerialization in my own code:
...
procedure TARISRestServer.DoHistoryFieldsSerialization(const Fields: array of const;
  var JSON: RawUTF8);
var
  lFields: TDocVariantData;
begin
  // start from the standard name/value pairs supplied by the framework
  lFields.InitObject(Fields);
  // then add the custom fields of TSQLARISHistory
  lFields.AddValue('LogonName',ServiceContext.Request.SessionUserName);
  lFields.AddValue('RecID',lFields.I['ModifiedRecord'] shr 6);
  lFields.AddValue('TableName',fModel.Tables[lFields.I['ModifiedRecord'] and 63].SQLTableName);
  JSON := lFields.ToJSON;
end;
...
and everything works OK!
Do you agree with this modification, and could you apply it?
Thanks in advance.
Esteban
Do you not agree?
Esteban
It made more sense to me to add a virtual method at the TSQLRecordHistory level.
Please check https://synopse.info/fossil/info/7d7b243077
Thanks for the proposal!
Thank you @ab, it works fine!!!
Esteban
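For reference, my descendant now looks more or less like this. I simply moved the TDocVariantData code from my DoHistoryFieldsSerialization proposal above into the override, so please check the commit / mORMot.pas for the exact InitializeFields declaration; also note that ServiceContext.Request (and its Server) is only assigned during a client request:

uses
  SynCommons, mORMot;

type
  TSQLARISHistory = class(TSQLRecordHistory)
  protected
    fLogonName: RawUTF8;
    fTableName: RawUTF8;
    fRecID: TID;
  public
    class procedure InitializeFields(const Fields: array of const;
      var JSON: RawUTF8); override;
  published
    property LogonName: RawUTF8 index 50 read fLogonName write fLogonName;
    property RecID: TID read fRecID write fRecID;
    property TableName: RawUTF8 index 50 read fTableName write fTableName;
  end;

class procedure TSQLARISHistory.InitializeFields(const Fields: array of const;
  var JSON: RawUTF8);
var
  lFields: TDocVariantData;
begin
  // start from the standard name/value pairs supplied by the framework
  lFields.InitObject(Fields);
  if ServiceContext.Request<>nil then begin
    // add the custom columns declared above
    lFields.AddValue('LogonName',ServiceContext.Request.SessionUserName);
    lFields.AddValue('RecID',lFields.I['ModifiedRecord'] shr 6);
    lFields.AddValue('TableName',ServiceContext.Request.Server.Model.
      Tables[lFields.I['ModifiedRecord'] and 63].SQLTableName);
  end;
  JSON := lFields.ToJSON;
end;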
Well, I overrode InitializeFields and got the extra fields in the table. Works great.
But what about compression? How can these fields be saved when the rows are compressed into the History BLOB? Maybe it makes sense to make HistoryAdd virtual as well, so that additional fields could be added there too? Would that be enough?