SQL - session report for the last n days (10 in the query below)
SELECT apis.requestdate, apis.externaluserid,
CASE
WHEN apis.status = 1 THEN 'Initiated'
WHEN apis.status = 2 THEN 'AuthenticationRequired'
WHEN apis.status = 3 THEN 'InProgress'
WHEN apis.status = 4 THEN 'Finished'
WHEN apis.status = 5 THEN 'AuthorizationFailed'
WHEN apis.status = 6 THEN 'Failed'
WHEN apis.status = 7 THEN 'DeletingConsent'
WHEN apis.status = 8 THEN 'FinishedManually'
WHEN apis.status = 9 THEN 'Expired'
ELSE 'other'
END AS SessionStatus,
CASE
WHEN [stage] = 1 THEN 'Authentication'
WHEN [stage] = 2 THEN 'Accounts'
WHEN [stage] = 3 THEN 'Transactions'
WHEN [stage] = 4 THEN 'Payment'
WHEN [stage] = 5 THEN 'TransactionDetails'
WHEN [stage] = 6 THEN 'DeleteConsent'
WHEN [stage] = 7 THEN 'Expired'
ELSE 'other'
END AS Stage,
CASE
WHEN [sessionsource] = 1 THEN 'Authorize'
WHEN [sessionsource] = 2 THEN 'RefreshActiveAccounts'
WHEN [sessionsource] = 3 THEN 'Batch'
WHEN [sessionsource] = 4 THEN 'DeleteConsent'
ELSE 'other'
END AS SessionSource,
[ExecutionId],
[failuremessageid],
[accountsfetchedcount],
[accountsprocessedcount],
(SELECT NAME FROM [dbo].bank b WHERE b.id = apis.bankid) AS BankName
FROM [dbo].apisession apis
WHERE requestdate > Dateadd(day, -10, (getdate()))
ORDER BY RequestDate DESC
[Mongo] Basics - searching the logs, consents, consent history
//consent by ID
db.getCollection('Consent').find({
"_id" : UUID("")
}).sort({
"created" : 1
})
//consent by IBAN
db.getCollection('Consent').find({
"content.access.accounts.iban" : "NL67KNAB0256523541"
}).sort({
"created" : 1
})
//consent by SWIFT code
db.getCollection('Consent').find({
"bankSwiftCode" : "SWIFT"
}).sort({
"created" : -1
})
//consent history by consent ID
db.getCollection('ConsentHistoryItem').find({
"additionalInformation.consentId" : UUID("")}
).sort({
"timestamp" : -1
})
//logs between two timestamps, plus optional filters: execution id / "psd2r" in the message (logs of calls to the bank API) / errors only
db.Log_20210000.find({
$and: [
{"date" : {$gte: ISODate("2021-00-00T00:00:00.000Z")}},
{"date" : {$lte: ISODate("2021-00-00T00:59:59.999Z")}},
// {"executionData.id" : ""},
// {"message" : /psd2r/i},
// {"level" : /warn|error/i}
]
}).sort({
"date" : 1})
SQL - checking the transaction list (payments, time discrepancy between containers)
WITH payment_timestamps_compare
AS
(
SELECT ph.id,ph.PaymentId,ph.Command,ph.timestamp,
ROW_NUMBER() OVER (PARTITION BY paymentid ORDER BY timestamp ) row_num,
CAST(ph.Timestamp AS date) AS date_to_short
FROM payment.PaymentHistory ph
)
SELECT * FROM
(
SELECT
ptc.Id
,ptc.PaymentId
,ptc.Command
,ptc.row_num
,ptc.date_to_short
,IIF (ptc.Command = 'CreatePayment' AND ptc.row_num = 1 , 1 , 0) AS check_true -- 0 is wrong
FROM payment_timestamps_compare ptc
WHERE ptc.row_num = 1 and date_to_short > '2021-10-12' --#change date to check payments from specific timestamps
) AS show_payment_strange_timestamps
WHERE check_true = 0
ORDER BY date_to_short
Counting the number of logs per level
//db.getCollection('Log_KBC_Staging_20200922').find({level:'Info'}).sort({date:-1})
db.getCollection('Log_KBC_Staging_20200922').aggregate(
[
{
$match: { $or: [
{
level: 'Error',
exception: { $exists: false }
},
{
level: 'Warn',
exception: { $exists: false }
},
{
level: 'Info',
exception: { $exists: false }
},
{
level: 'Debug',
exception: { $exists: false }
},
{
level: 'Trace',
exception: { $exists: false }
}
]
}
},
{
$group: {
_id: { level: '$level' },
elementCount: { $sum: 1 }
}
},
{
$project: {
numberOfDocs: '$elementCount'
}
},
],
{ allowDiskUse: true }
).toArray()
Response time for the customer account list
db.getCollection('NAZWAKOLEKCJI').aggregate(
[
{
$match: { $or: [
{
message: /api\/v2\/bankaccount$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2\/bankaccount\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2\/bankaccount$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2\/bankaccount\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
}
]
}
},
{
$group: {
_id: { executionId: '$executionData.id' },
name: { $min: "$message" },
min: { $min: "$date" },
max: { $max: "$date" },
elementCount: { $sum: 1 }
}
},
{
$project: {
executionId: '$_id.executionId',
name: '$name',
numberOfDocs: '$elementCount',
when: '$min',
howLong: { $divide: [ {$subtract: [ "$max", "$min" ]}, 1000]}
}
},
{
$match: {
numberOfDocs : { $gt: 1}
}
},
{
$sort: { numberOfDocs: 1 }
}
],
{ allowDiskUse: true }
).toArray()
Response duration for our Authorize (the first POST)
db.getCollection('BE_Log_KBC_Staging_20200924').aggregate(
[
{
$match: { $or: [
{
message: /api\/v2.*authorize$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*authorize\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*authorize$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2\/bankaccount\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: 'IGetConsentAuthorizationUrls Send',
exception: { $exists: false },
logger: 'LogEvent'
}
]
}
},
{
$group: {
_id: { executionId: '$executionData.id' },
name: { $min: "$message" },
min: { $min: "$date" },
max: { $max: "$date" },
elementCount: { $sum: 1 },
documents: { $push: "$$ROOT" }
}
},
{
$project: {
executionId: '$_id.executionId',
name: '$name',
numberOfDocs: '$elementCount',
documents : '$documents',
when: '$min',
howLong: { $divide: [ {$subtract: [ "$max", "$min" ]}, 1000]}
}
},
{
$match: {
numberOfDocs : { $gt: 2}
}
},
{
$sort: { numberOfDocs: 1 }
}
],
{ allowDiskUse: true }
)
Same as above, but limited to a time window
var cursor=db.getCollection('Log_20201202').aggregate(
[
{
$match: { $or: [
{
message: /api\/v2.*authorize$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*authorize\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*authorize$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2\/bankaccount\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: 'IGetConsentAuthorizationUrls Send',
exception: { $exists: false },
logger: 'LogEvent'
}
]
}
},
{
$group: {
_id: { executionId: '$executionData.id' },
name: { $min: "$message" },
min: { $min: "$date" },
max: { $max: "$date" },
elementCount: { $sum: 1 },
documents: { $push: "$$ROOT" }
}
},
{
$project: {
executionId: '$_id.executionId',
name: '$name',
numberOfDocs: '$elementCount',
documents : '$documents',
when: '$min',
howLong: { $divide: [ {$subtract: [ "$max", "$min" ]}, 1000]}
}
},
{
$match: {
numberOfDocs : { $gt: 2},
when: { $gte:ISODate("2020-12-02T08:00:09.000Z"), $lt:ISODate("2020-12-02T10:30:35.000Z") }
}
},
{
$sort: { numberOfDocs: 1 }
}
],
{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
Refresh session duration
db.getCollection('BE_Log_KBC_Production_20200924').aggregate(
[
{
$match: { $or: [
{
message: /\/api\/v2\/bankaccount$/i,
logger: 'Next',
'details.url': /refreshActiveAccounts=TRUE/i
},
{
message: /\/api\/v2\/bankaccount\/$/i,
logger: 'Next',
'details.url': /refreshActiveAccounts=TRUE/i
},
{
message: 'IFetchFinished Consume'
}
]
}
},
{
$group: {
_id: { executionId: '$executionData.id' },
name: { $min: "$message" },
min: { $min: "$date" },
max: { $max: "$date" },
elementCount: { $sum: 1 }
}
},
{
$project: {
executionId: '$_id.executionId',
name: '$name',
numberOfDocs: '$elementCount',
when: '$min',
howLong: { $divide: [ {$subtract: [ "$max", "$min" ]}, 1000]}
}
},
{
$match: {
numberOfDocs : { $gt : 2}
}
},
{
$sort: { numberOfDocs: 1 }
}
],
{ allowDiskUse: true }
)
How many UUPs (customer-bank pairs) we have in the database
select count(*) as liczba_klientów, l.w_ilu_bankach_konto from (
select t.CompanyId, count(*) as w_ilu_bankach_konto from (
select distinct ba.CompanyId, b.Name from BankAccount ba join bank b on ba.bankid=b.id) t
group by t.CompanyId) l
group by l.w_ilu_bankach_konto
Error types per bank (v.127+)
db.getCollection('BE_Log_KBC_Staging_20200928').aggregate(
[
{$match:{ $or: [
{
message: /:call$/i,
level: 'Warn'
},
{
message: /:call$/i,
level: 'Error'
}
]
}
},
{$group:{
_id: { bankId: '$details.bank.id', bankSwiftCode: '$details.bank.swiftCode', call: '$details.call', body: '$details.response.content' },
name: { $min: "$message" },
elementCount: { $sum: 1 },
}
},
{
$project: {
bankId : '$_id.bankId',
bankSwiftCode: '$_id.bankSwiftCode',
name: '$name',
numberOfDocs: '$elementCount',
rspBody: '$_id.body',
}
}
],{ allowDiskUse: true }
)
Analysis of TransactionDetails responses (v.122)
var cursor=db.getCollection('Log_PKO_Staging_20200929').aggregate(
[
{$match:{ $or: [
{
message: /\/getTransactionDetail:response$/i,
level: 'Warn'
},
{
message: /\/getTransactionDetail:call$/i,
level: 'Info'
}
]
}
},
{$group:{
_id: { correlation: '$details.correlationId', execution: '$executionData.id'},
name: { $max: "$message" },
rsp_body: {$push: '$details.content'},
baseUrl: {$max: "$details.call.baseUrl"}
}
},
{
$project: {
name: '$name',
correlation: '$_id.correlation',
execution: '$_id.execution',
baseUrl: '$baseUrl',
responses: '$rsp_body'
}
},
{$match: {
$or: [
{
name: /\/getTransactionDetail:response$/i,
}
]
}
}
],{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
Analysis of error responses for all requests (v.122)
var cursor=db.getCollection('Log_KBC_Staging_20201028').aggregate(
[
{$match:{ $or: [
{
// both regexes must match the message (a duplicated 'message' key in the object literal would silently drop the first one)
$and: [ { message: /PSD2RestApiCaller.Call/i }, { message: /:call/i } ], level: /Info/
},
{
$and: [ { message: /PSD2RestApiCaller.Call/i }, { message: /:response/i } ], level: /Warn|Error/
}]
}
},
{$group:{
_id: { correlation: '$details.correlationId', execution: '$executionData.id'},
name: { $max: "$message" },
rsp_body: {$push: '$details.content'},
baseUrl: {$max: "$details.call.baseUrl"}
}
},
{
$project: {
name: '$name',
correlation: '$_id.correlation',
execution: '$_id.execution',
baseUrl: '$baseUrl',
responses: '$rsp_body'
}
},
{$match: {
$or: [
{
name: /:response$/i
}
]
}
}
],{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
Response time per call to our API
var cursor=db.getCollection('Log_PKO_Staging_20200901').aggregate(
[
{
$match: { $or: [
{
message: /api\/v2.*authorize$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*authorize\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*authorize\//i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*authorize\//i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*authorize$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*authorize\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*bankaccount$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*bankaccount\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*bankaccount\//i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*bankaccount\//i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*bankaccount$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*bankaccount\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: 'IGetConsentAuthorizationUrls Send',
exception: { $exists: false },
logger: 'LogEvent'
},
{
message: /api\/v2.*status$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*status\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*status\//i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*status\//i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*status$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*status\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*details$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*details\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*details\//i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*details\//i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*details$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*details\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*sync$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*sync\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*sync\//i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*sync\//i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*sync$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*sync\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*transaction$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*transaction\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*transaction\//i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*transaction\//i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*transaction$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*transaction\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*delete$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*delete\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*delete\//i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*delete\//i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*delete$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*delete\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*consent$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*consent\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*consent\//i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*consent\//i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*consent$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*consent\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*user$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*user\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*user\//i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*user\//i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*user$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*user\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*country$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*country\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*country\//i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*country\//i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*country$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*country\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*callback$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*callback\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*callback\//i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*callback\//i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*callback$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2.*callback\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
}
]
}
},
{
$group: {
_id: { executionId: '$executionData.id' },
name: { $min: "$message" },
min: { $min: "$date" },
max: { $max: "$date" },
elementCount: { $sum: 1 },
documents: { $push: "$$ROOT" }
}
},
{
$project: {
executionId: '$_id.executionId',
name: '$name',
numberOfDocs: '$elementCount',
documents : '$documents',
when: '$min',
howLong: { $divide: [ {$subtract: [ "$max", "$min" ]}, 1000]}
}
},
{
$match: {
numberOfDocs : { $gt: 1}
}
},
{
$sort: { name: 1 }
}
],
{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
SQL script determining how many customers in LDS have an account linked in a given bank
select COM.ExternalId, BI.[Value], BankAccountIdentifier.[Value],
(select count(*) from BankAccount
join BankIdentifier on BankACcount.BankId=BankIdentifier.BankId
where Companyid = COM.id and BankIdentifier.Value <> 'SWIFTCODE') as Accounts,
case when (select count(*) from BankAccount
join BankIdentifier on BankACcount.BankId=BankIdentifier.BankId
where Companyid = COM.id and BankIdentifier.Value <> 'SWIFTCODE') > 0 then 'TAK' else 'NIE' END as CzyKontaInnychBankow
from Company as COM
join BankACcount as BA on COM.id = BA.CompanyId
join BankIdentifier as BI on BI.BankId = BA.BankId
join BankAccountIdentifier on BankAccountIdentifier.BankAccountId = BA.id
where COM.ExternalId not like '%deleted%'
and BI.[Value] = 'SWIFTCODE'
and BankAccountIdentifier.SchemeId = 1
HSM - HSM response times in seconds
The query shows HSM response times in seconds, sorted in descending order. Put the appropriate collection in place of [NAZWA KOLEKCJI].
DBQuery.shellBatchSize=500;
db.getCollection('[NAZWA KOLEKCJI]').aggregate([
{
$match:
{'message': /hsm/i ,'details.correlationId':{$exists:true}}
},
{
$group: {
_id: '$details.correlationId',
maxDate: { $max: '$date' },
minDate: { $min: '$date' },
count: { $sum: 1 }
}
},
{
$project:{
'when':'$minDate',
'duration': { $divide: [{ $subtract:[ '$maxDate', '$minDate' ] }, 1000] }
}
},
{
$sort: { duration:-1 }
}
])
LOG Count per Hour
var cursor=db.getCollection('Log_20211118').aggregate(
[
{
$group:
{
_id:
{
hour: { $hour: "$date"}
},
count: { $sum:1 },
date: { $first: "$date" }
}
},
{
$project:
{
hour:
{
$hour: "$date"
},
date:
{
$dateToString: { format: "%Y-%m-%d", date: "$date" }
},
count: 1,
_id: 0
}
},
{
$sort: { hour: -1 }
}
],{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
APIHUB response time on bankAccount, split into refresh and non-refresh
var cursor=db.getCollection('Log_PKO_Staging_20201211').aggregate(
[
{
$match: { $or: [
{
message: /api\/v2\/bankaccount$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2\/bankaccount\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2\/bankaccount$/i,
exception: { $exists: false },
logger: 'LogResponse'
},
{
message: /api\/v2\/bankaccount\/$/i,
exception: { $exists: false },
logger: 'LogResponse'
}
]
}
},
{
$group: {
_id: { executionId: '$executionData.id' },
name: { $min: "$message" },
refresh: { $max: "$details.arguments.RefreshActiveAccounts" },
status: { $max: "$details.headers.Actual-Response-Status" },
min: { $min: "$date" },
max: { $max: "$date" },
elementCount: { $sum: 1 }
}
},
{
$project: {
executionId: '$_id.executionId',
refresh: '$refresh',
status: '$status',
name: '$name',
numberOfDocs: '$elementCount',
when: '$min',
howLong: { $divide: [ {$subtract: [ "$max", "$min" ]}, 1000]}
}
},
{
$match: {
numberOfDocs : { $gt: 1},
status: '200'
}
},
{
$sort: { howLong: -1 }
}
],
{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
List of users and call times of the DELETE bankAccount method
var cursor=db.getCollection('Log_20210122').aggregate(
[
{
$match: { $or: [
{
message: /api\/v2.*bankAccount$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*bankAccount\/$/i,
exception: { $exists: false },
logger: 'Next'
},
{
message: /api\/v2.*bankAccount\//i,
exception: { $exists: false },
logger: 'Next'
}
]
}
},
{
$group: {
_id: { userId: '$executionData.context.userId' },
name: { $min: "$message" },
date: { $min: "$date" },
method: { $min: '$details.method' }
}
},
{
$project: {
userId: '$_id.userId',
method: '$method',
message: '$name',
when: '$date'
}
},
{
$match: {
method: 'DELETE'
}
},
{
$sort: { when: -1 }
}
],
{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
NEW API: timings with machine name
var cursor=db.getCollection('Log_20210216').aggregate(
[
{
$match: { $or: [
{message: /\/api\//i, logger: 'Log' }
]
}
},
{
$group: {
_id: { executionId: '$executionData.id' },
name: { $max: "$message" },
min: { $min: "$date" },
max: { $max: "$date" },
elementCount: { $sum: 1 },
documents: { $push: "$$ROOT" },
machine: {$max: "$machine.machineName"}
}
},
{
$project: {
executionId: '$_id.executionId',
machine: '$machine',
name: '$name',
numberOfDocs: '$elementCount',
documents : '$documents',
when: '$min',
howLong: { $divide: [ {$subtract: [ "$max", "$min" ]}, 1000]}
}
},
{
$sort: { name: 1 }
}
],
{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
NEW API: quick lookup of API responses with errors
db.getCollection('Log_20210215').find({
message: /\/api\//i, logger: 'Log',
'details.statusCode': { $exists: true, $nin: [ 200, 201 ] } }).sort({date:-1})
List of expired consents with their creation dates
assumptions:
which consents expired (per consent) in the last 24 hours, and for each one: which bank and which invalidation reason
for each consent also the userId
db.getCollection('Consent').find({'status':{$in:["Invalid"]},'invalidationInfo.invalidated':{$lt: new Date(),$gte: new Date(new Date().setDate(new Date().getDate()-1))}},{'_id':1,'created':1,'userId':1,'bankSwiftCode':1,'invalidationInfo':1}).sort({'invalidationInfo.invalidated':-1})
List of active consents per bank and user
assumptions:
how many active consents we have per customer per bank (a distribution), e.g. in Alior we have x users with one active consent, y users with two active consents, etc.
feature:
countOlderThan90Days and countNewerThan90Days - a split into consents issued within the last 90 days and consents issued more than 90 days ago
var cursor=db.getCollection('Consent').aggregate(
[
{
'$match': {
'status': { '$ne': 'Invalid' },
'isDeleted': false
}
},
{
'$group': {
'_id': { 'swiftcode': '$bankSwiftCode', 'userId': '$userId' },
'countOlderThan90Days': { '$sum': { '$cond': [{ '$lte': ['$created', new Date(new Date().setDate(new Date().getDate()-90))] }, 1, 0] } },
'countNewerThan90Days': { '$sum': { '$cond': [{ '$gte': ['$created', new Date(new Date().setDate(new Date().getDate()-90))] }, 1, 0] } }
}
},
{
'$group': {
'_id': '$_id.swiftcode',
'userConsentsOlderThan90Days': { '$push': '$countOlderThan90Days' },
'userConsentsNewerThan90Days': { '$push': '$countNewerThan90Days' }
}
},
{
'$project': {
'_id': 0,
'swiftcode': '$_id',
'usersWithActiveConsentsOlderThan90Days': {
'1': { '$size': { '$filter': { 'input': '$userConsentsOlderThan90Days', 'cond': { '$eq': ['$$this', 1] } } } },
'2': { '$size': { '$filter': { 'input': '$userConsentsOlderThan90Days', 'cond': { '$eq': ['$$this', 2] } } } },
'3': { '$size': { '$filter': { 'input': '$userConsentsOlderThan90Days', 'cond': { '$eq': ['$$this', 3] } } } },
'4': { '$size': { '$filter': { 'input': '$userConsentsOlderThan90Days', 'cond': { '$eq': ['$$this', 4] } } } },
'5': { '$size': { '$filter': { 'input': '$userConsentsOlderThan90Days', 'cond': { '$eq': ['$$this', 5] } } } },
'more': { '$size': { '$filter': { 'input': '$userConsentsOlderThan90Days', 'cond': { '$gt': ['$$this', 5] } } } }
},
'usersWithActiveConsentsNewerThan90Days': {
'1': { '$size': { '$filter': { 'input': '$userConsentsNewerThan90Days', 'cond': { '$eq': ['$$this', 1] } } } },
'2': { '$size': { '$filter': { 'input': '$userConsentsNewerThan90Days', 'cond': { '$eq': ['$$this', 2] } } } },
'3': { '$size': { '$filter': { 'input': '$userConsentsNewerThan90Days', 'cond': { '$eq': ['$$this', 3] } } } },
'4': { '$size': { '$filter': { 'input': '$userConsentsNewerThan90Days', 'cond': { '$eq': ['$$this', 4] } } } },
'5': { '$size': { '$filter': { 'input': '$userConsentsNewerThan90Days', 'cond': { '$eq': ['$$this', 5] } } } },
'more': { '$size': { '$filter': { 'input': '$userConsentsNewerThan90Days', 'cond': { '$gt': ['$$this', 5] } } } }
}
}
}
]);
while(cursor.hasNext())
{
print(cursor.next());
}
raw pre-alpha version without the day limit; it may be useful to someone later as a starting point:
var cursor=db.getCollection('Consent').aggregate(
[
{
$match: { 'status':{$not:/Invalid/i},
'isDeleted': false,
//'created' : {$gte: new Date(new Date().setDate(new Date().getDate()-2))}
}
},
{
$group: {
_id: { bankId: '$bankSwiftCode', userId: '$userId' },
consentId: { $push: "$_id" },
created: { $push: "$created" },
elementCount: { $sum: 1 },
linkedAccounts: { $push: "$linkedAccounts" }
}
},
{
$project: {
bankId: '$_id.bankId',
userId: '$_id.userId',
consentId: '$consentId',
numberOfDocs: '$elementCount',
linkedAccounts : '$linkedAccounts',
created: '$created'
}
},
{
$sort: { created: -1 }
}
],
{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
LOG count per container and hour
var cursor=db.getCollection('Log_20210527').aggregate(
[
{
$group: {
_id: { containerId: '$machine.machineName',hour: { $hour: "$date"} },
elementCount: { $sum: 1 },
date: { $first: "$date" }
}
},
{
$project: {
_id:1,
containerId: '$containerId',
numberOfDocs: '$elementCount',
hour:
{
$hour: "$date"
}
}
},
{ $sort: {hour:-1} }
],
{ allowDiskUse: true });
while(cursor.hasNext())
{
print(cursor.next());
}
Finding token requests when only the customer's IBAN is known
var cursor=db.getCollection('Log_20210601').aggregate(
[
{$match:{ $or: [
{
message: /\/*\/token:call/i // or /PSD2RestApiCaller.Call:\/*\/token:call/i
},
{
'details.content.content.accountNumber':'PL50102055581111145030700092'
}
]
}
},
{$group:{
_id: { execution: '$executionData.id', invoker: '$executionData.invoker'},
name: { $max: "$message" },
request: {$push: '$details.request'},
response: {$push: '$details.response'},
date: {$max: '$date'}
}
},
{
$project: {
name: '$name',
invoker: '$_id.invoker',
execution: '$_id.execution',
request: '$request',
response: '$response',
date: '$date',
}
},
{$match: {
$or: [
{
name: /token:call/i
}
]
}
},
{
$sort: {date: -1}
}
],{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
Listing user externalIds sorted descending by created, with a limit on the number returned
var cursor=db.getCollection('User').aggregate(
[
{
$match: { 'updateOffline': false,
'identifiers.obsoleteId':{$nin:[/regression*/i]}
}
},
{
$group: {
_id: { externalId: '$identifiers.obsoleteId' },
created: {$min: '$created'}
}
},
{
$project: {
_id:0,
externalId: '$_id.externalId',
created: '$created'
}
},
{
$sort: { created: -1 }
},
{
$limit: 1100
}
],
{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
[SQL] fetching the list of users and accounts without balances
select bai.Value,com.ExternalId, bac.[Id],bac.[AccountNumber],bac.[CreateDate],bac.[BankId],bac.[IsActive],bac.[SyncFromDate],bac.[SyncLastDate],bac.[SyncStatus],bac.[AccountTypeName],bac.[BankAccountType],bac.[Guid],
(select max(date) from BankAccountBalance bab where bab.BankAccountId=bac.id) as balanceData
from BankAccount bac
join Company com on bac.CompanyId=com.Id
join BankAccountIdentifier bai on bac.Id=bai.BankAccountId
where bai.SchemeId=1
and bac.BankId in (select BankId from BankIdentifier where [value]=N'CITIPLPX')
[SQL] fetching transactions for the accounts above
select trx.[Id],trx.BankAccountId,bai.[value] as IBAN,com.externalid as [USER],trx.[AccountingDate],trx.[ReferenceNumber],trx.[TransactionStatus],trx.[EffectiveDate],trx.[RetryCount],trx.[OrdinalNumber],trx.[ExternalNumber],trx.[TransactionType],trx.[OriginalTransactionType],trx.[DetailsFetched],trx.[LastSyncDate],trx.[EndToEndId]
from BankAccount bac
join [Transaction] trx on trx.BankAccountId=bac.Id
join BankAccountIdentifier bai on bai.BankAccountId=bac.Id
join company com on com.Id=bac.CompanyId
where bac.BankId in (select BankId from BankIdentifier where [value]=N'CITIPLPX') and bai.SchemeId=1
[cURL] recalculate with a given SWIFT and scope (hashCode)
curl -X POST -H 'Host: localhost' -H 'X-Tenant-Id: placeholder' -H 'Execution-id: placeholder' -H "Content-type: application/json" 'http://localhost/api/hashCode/recalculate' -d '{"SwiftCode" : "CITIPLPX","Scope" : "Transactions" }'
*if you do not need the scope, simply drop it
**run it inside the specific CONNECTOR
[cURL] service update (migrations)
curl -XPOST -H 'Host: localhost' -H 'X-Tenant-Id: placeholder' -H 'Execution-id: placeholder' -H "Content-type: application/json" -d '{}' 'http://localhost/api/database/update'
enter the container → apt update → apt install curl → y → run the curl
[SQL] checking whether users were assigned invalid GUIDs
select * from dbo.UserCompany where len(UserId) != 36
[CERT] inspecting the fingerprint from a PFX file
openssl pkcs12 -in SCIEZKA_DO_PFX.pfx -nokeys | openssl x509 -noout -fingerprint -sha256
[SQL] how many customers in a given bank + how many linked accounts
select isNUll(b.Name, '') as BankName, isNull (b.bankid,'') as BankId,
count(distinct b.ExternalId) as no_customers_using_Bank, count(distinct b.id) as no_accounts_linked
from
(
select a.externalid, a.companyid_origin, a.BankId, a.baCompanyID, a.id, a.Name,
case when a.IsBusiness =1 then 1 else NUll end as IsBusiness,
case when a.baCompanyID is not null then 1 else 0 end as bankaccountindicator
from (
select c.externalid, c.id as companyid_origin, ba.BankId, ba.CompanyId as baCompanyID, ba.id, aba.IsBusiness, b.Name from Company c left join
BankAccount ba on ba.CompanyId = c.Id left join bank b on b.id=ba.BankId left join ApiBankAccount aba on aba.id=ba.id where ba.IsActive=1 and b.id<>177
) a
) b
group by b.Name, b.bankid
[CONSUL] listing the connectors registered in Consul
run inside the Consul container:
1) consul catalog services --tags | grep PSD2Connector | grep -Eo '[\s,](\w+)' | cut -c 2-
OR
2) curl http://localhost:8500/v1/catalog/services
OR, for additional info
3) curl http://localhost:8500/v1/catalog/service/PSD2Connector
[mongo consent] (user, bank) pairs where a given user has more than one consent in a given bank
var cursor=db.getCollection('Consent').aggregate(
[
{
'$match': { 'status': 'Confirmed', 'isDeleted':false}
},
{
'$group': {
'_id': { 'bankSwiftCode': '$bankSwiftCode', 'userId': '$userId' },
'count': { '$sum': 1 },
'consents': { '$push': { 'consentId': '$_id', 'created': '$created', 'variables':'$variables', 'externalConsentId':'$externalId' } }
}
},
{
'$match': { 'count': { '$gt': 1} }
}
]);
while(cursor.hasNext())
{
print(cursor.next());
}
[mongo consent] invalidating the older consents returned above, with the given invalidationInfo
// UPDATE
var consentIds = db.getCollection('Consent').aggregate(
[
{
'$match': { 'status': 'Confirmed', 'isDeleted': false, 'scope': 'Ais' }
},
{
'$group': {
'_id': { 'bankSwiftCode': '$bankSwiftCode', 'userId': '$userId' },
'count': { '$sum': 1 },
'consents': { '$min': { 'created': '$created', 'consentId': '$_id', 'variables':'$variables', 'externalConsentId':'$externalId' } }
}
},
{
'$match': { 'count': { '$gt': 1 } }
},
{
'$project': { 'consentId':'$consents.consentId', '_id': 0 }
}
]).toArray().map(function (obj) { return obj.consentId; });
// db.getCollection('Consent').find({ '_id': { '$in': consentIds } })
db.getCollection('Consent').updateMany( {'_id': { '$in': consentIds }}, { $set: {'status':'Invalid','invalidationInfo': { 'description':null, 'reason': 'duplicate invalidated', 'invalidated': ISODate() }}})
* Note: in a $group, $min applied to an embedded document (used above to get the minimum 'created' together with its related data) compares by the first expression inside the object, so the whole object belonging to the smallest 'created' is returned; see the sketch below.
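A minimal illustration of that behaviour on a hypothetical 'Demo' collection (the collection name and its fields are made up for this example):
db.getCollection('Demo').insertMany([
{ _id: 1, userId: 'u1', created: ISODate('2021-01-01T00:00:00Z'), note: 'older' },
{ _id: 2, userId: 'u1', created: ISODate('2021-06-01T00:00:00Z'), note: 'newer' }
])
db.getCollection('Demo').aggregate([
{ $group: {
_id: '$userId',
// the embedded documents are compared field by field, so 'created' (the first field) decides which one wins
oldest: { $min: { created: '$created', consentId: '$_id', note: '$note' } }
} }
])
// expected result: oldest = { created: ISODate('2021-01-01T00:00:00Z'), consentId: 1, note: 'older' }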
[mongo] counting the 429/409 responses of our API
db.getCollection('Log_20210914').find({$or:[{'details.statusCode':{$in:[409,429]}},{'message':/Request blocked due to quota exceeded./i}],$and:[{'machine.entryAssembly':/rest/i},{level:/Warn|Error/i}]},{_id:0,'details.statusCode':1,message:1,date:1}).sort({date:-1})
if the message is "Request blocked due to quota exceeded." it means an account refresh for that user (RAA=true) was fired again too quickly (LINK to the config)
if the message is different, it means the session-blocking mechanism kicked in (LINK to the config)
[SQL] Number of BankAccountType dictionary mapping occurrences for the fetched accounts
select CASE
WHEN ba.bankaccounttype = 1 THEN 'ChargeCard'
WHEN ba.bankaccounttype = 2 THEN 'CreditCard'
WHEN ba.bankaccounttype = 3 THEN 'CurrentAccount'
WHEN ba.bankaccounttype = 4 THEN 'EMoney'
WHEN ba.bankaccounttype = 5 THEN 'Loan'
WHEN ba.bankaccounttype = 6 THEN 'Mortgage'
WHEN ba.bankaccounttype = 7 THEN 'PrePaidCard'
WHEN ba.bankaccounttype = 8 THEN 'Savings'
WHEN ba.bankaccounttype = 9 THEN 'Deposits'
ELSE 'Other'
END AS AccountType, ba.bankaccounttype, count(ba.id) as [Ilosc]
from BankAccount ba
group by ba.bankaccounttype
[SQL] identifying a bank by its SWIFT code
SELECT bi.Value,
bi.BankId,
b.Name,
b.OfficialName,
c.FullName AS Country_Name
FROM dbo.BankIdentifier BI
left outer join dbo.bank B
on b.Id = bi.BankId
left outer join dbo.Country C
on b.CountryId = c.Id
where bi.Value = 'SWIFTCODE' ---- PUT SWIFT
order by bi.BankId ASC
[mongo] how many users have their newest consent for a given bank younger than 90 days but Invalid
db.getCollection('Consent').aggregate([{
$match: {
isDeleted: false,
// 'bankSwiftCode':'BPKOPLPW'
}
},{
$sort: {userId: 1, bankSwiftCode: 1, created: -1}
},{
$group: {
_id: {userId: "$userId", bank: "$bankSwiftCode"},
newestConsentId: { $first: "$_id"}
}
}, {
$lookup: {
from: 'Consent',
as:"newestCollection",
let:{g:"$newestConsentId"},
pipeline:[
{$match:{$expr:{$eq:["$_id","$$g"]}}
}]
}
},
{
$addFields: { newestConsent: { $first: "$newestCollection" } } },
{
$match: {
"newestConsent.status" : "Invalid",
'newestConsent.created': {$gte: new Date(new Date().setDate(new Date().getDate()-90)),}
}
},
{
$project: {
created: '$newestConsent.created',
consentId: '$newestConsent._id',
userId: '$newestConsent.userId',
bank: '$newestConsent.bankSwiftCode'
}
},
{
$group: {
_id: "$bank",
count: {$sum:1}
}
}
],
{ allowDiskUse: true })
*for mongo 4.4 and newer
db.getCollection('Consent').aggregate([{
$match: {
isDeleted: false,
'bankSwiftCode':'BPKOPLPW'
}
},{
$sort: {userId: 1, bankSwiftCode: 1, created: -1}
},{
$group: {
_id: {userId: "$userId", bank: "$bankSwiftCode"},
consents: { $push: "$_id"}
}
},{
$addFields: { newestConsentId: { $arrayElemAt: [ "$consents", 0 ] } }
},{
$lookup: {
from: 'Consent',
as:"newestCollection",
let:{g:"$newestConsentId"},
pipeline:[
{$match:{$expr:{$eq:["$_id","$$g"]}}
}]
}
},
{
$addFields: { newestConsent: { $arrayElemAt: [ "$newestCollection", 0 ] } }
},
{
$match: {
"newestConsent.status" : "Invalid",
'newestConsent.created': {$gte: new Date(new Date().setDate(new Date().getDate()-90)),}
}
},
{
$project: {
created: '$newestConsent.created',
consentId: '$newestConsent._id',
userId: '$newestConsent.userId',
bank: '$newestConsent.bankSwiftCode'
}
},
/* {
$group: {
_id: "$bank",
count: {$sum:1}
}
}
*/
],
{ allowDiskUse: true })
*for mongo versions < 4.4
[sql] checking bankaccounttype mappings
select distinct b.name, ba.BankAccountType,
COALESCE(
ExtendedData.value(N'(//*:AccountType//*:Code)[1]','nvarchar(max)'),
ExtendedData.value(N'(//*:accountType//*:Code)[1]','nvarchar(max)'),
ExtendedData.value(N'(//*:AccountType//*:code)[1]','nvarchar(max)'),
ExtendedData.value(N'(//*:accountType//*:code)[1]','nvarchar(max)')
) as code,
COALESCE(
ExtendedData.value(N'(//*:AccountType//*:description)[1]','nvarchar(max)'),
ExtendedData.value(N'(//*:accountType//*:description)[1]','nvarchar(max)'),
ExtendedData.value(N'(//*:AccountType//*:Description)[1]','nvarchar(max)'),
ExtendedData.value(N'(//*:accountType//*:Description)[1]','nvarchar(max)')
) as [description]
from Psd2BankAccount pba
join bank b on b.id = pba.BankId
join BankAccount ba on ba.id = pba.BankAccountId
JSON to CSV/XLS conversion (python)
You need to install the dependencies first:
python,
pandas,
probably xlrd, maybe something else.
If it does not work, ask Przemysław Piechocki.
Example usage:
PS C:\temp\json_to_xls> python .\app.py
file name to convert: zmongoraport_logi_jakies.txt
3 output files:
out.json (the input converted to valid JSON)
out.xls (the JSON converted to XLS)
out.csv (the JSON converted to CSV)
Note: nested JSON objects end up in a single column named after the object at the root.
import pandas
import os
import re

# output file names
json_file = 'out.json'
xls_file = 'out.xls'
csv_file = 'out.csv'
noarr_json = 'noarr.json'

file = input("file name to convert: ")

# first pass: turn the mongo shell dump into a single valid JSON array
with open(file, 'r') as f:
    content = f.read()
# replace the "/* 1 */" result separators with commas and strip the shell-only wrappers
content_new = re.sub(r'\/\*.*\*\/', ',', content, flags=re.M)
content_new = re.sub(r'ISODate\(', '', content_new, flags=re.M)
content_new = re.sub(r'ObjectId\(', '', content_new, flags=re.M)
content_new = re.sub(r'\)', '', content_new, flags=re.M)
content_new = re.sub(r'NumberLong\(', '', content_new, flags=re.M)
# the very first separator becomes the opening bracket of the array
content_new = re.sub(',', '[', content_new, flags=re.M, count=1)
with open(json_file, 'w') as o:
    o.write(content_new)
    o.writelines(']')

# second pass: the same cleanup, but without wrapping the documents in an array
with open(file, 'r') as f:
    content = f.read()
content_new = re.sub(r'\/. [0-9]+ .\/', '', content, flags=re.M)
content_new = re.sub(r'ISODate\(', '', content_new, flags=re.M)
content_new = re.sub(r'ObjectId\(', '', content_new, flags=re.M)
content_new = re.sub(r'NumberLong\(', '', content_new, flags=re.M)
content_new = re.sub(r'\)', '', content_new, flags=re.M)
with open(noarr_json, 'w') as o:
    o.write(content_new)

# export the array version to xls and csv
if os.path.exists(json_file):
    pandas.read_json(json_file).to_excel(xls_file)
if os.path.exists(json_file):
    pandas.read_json(json_file).to_csv(csv_file)
[mongo] exporting query results to CSV
Array.prototype.toCSV = function toMediocreCSV () {
const results = this;
const headers = {};
results.forEach((result) =>
Object.keys(result).forEach((key) => headers[key] = true)
)
const keys = Object.keys(headers);
print(keys.join(";"));
results.forEach((result) =>
print(keys.map((key) => result[key] != null ? result[key] : '')
.join(";"))
)
}
db.getCollection('KOLEKCJA').find({},{_id:0}).toArray().toCSV()
*note: this will not work when there are nested objects (e.g. inside details, as on the screenshot); see the sketch below
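A minimal workaround sketch, assuming the nested values you need are e.g. details.statusCode and executionData.id (both field names are only illustrative): project them onto the top level first, so the helper above gets one scalar value per column.
db.getCollection('KOLEKCJA').aggregate([
{ $project: {
_id: 0,
date: 1,
message: 1,
statusCode: '$details.statusCode', // illustrative nested field
executionId: '$executionData.id' // illustrative nested field
} }
]).toArray().toCSV()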
[mongo] response times of our API (new API)
// mongo version < 4.4.2
// both message conditions are wrapped in $and because a duplicated 'message' key would silently drop the first regex
var cursor=db.getCollection('Log_20211125').find({'details.executionTime':{ $exists: true },logger:{$in:['Log','LogResponse']},$and:[{message: /\/api\/v2\/.*/i},{message:{$nin:[/\/internal\//i]}}]} , {_id:0,message:1,'executionData.id':1,'date':1,'details.executionTime':1});
while(cursor.hasNext())
{
print(cursor.next());
}
// mongo version >= 4.4.2
var cursor=db.getCollection('Log_20211125').find({'details.executionTime':{ $exists: true },logger:{$in:['Log','LogResponse']},$and:[{message: /\/api\/v2\/.*/i},{message:{$nin:[/\/internal\//i]}}]} , {_id:0,message:1,traceId:'$executionData.id','date':1,'durationTime': '$details.executionTime'});
while(cursor.hasNext())
{
print(cursor.next());
}
SQL - generator of requests for today's historicalBalance by IBAN
select
ba.id,
bai.value,
concat('##{{url_tpp}}/api/user/id/',c.ExternalId,'/account/iban/', bai.value, '/currency/', cu.Code, '/historicalBalance?eod=true&date=', CONVERT(char(10), GetDate(),126)) postman
from dbo.BankAccount ba
join BankAccountIdentifier bai on bai.BankAccountId = ba.id
join company c on ba.CompanyId = c.id
join currency cu on cu.id=ba.CurrencyId
join BankAccountIdentifierScheme bais on bais.id = bai.SchemeId
where
bai.schemeid = 1 --iban
and bai.Value = 'IBAN'
order by ba.id asc
SQL - adding a new bank and assigning it to an API client
adding Credit Agricole to the santander and santanderPis API clients
--if not exists (select [ExternalId] from ApiClient where [ExternalId] =N'santanderPis')
--if not exists (select [ExternalId] from ApiClient where [ExternalId] =N'santander')
--INSERT [dbo].[ApiClient] ([ExternalId]) VALUES (N'santanderPis')
--INSERT [dbo].[ApiClient] ([ExternalId]) VALUES (N'santander')
INSERT [dbo].[ApiClientBank] ([ApiClientId], [BankId], [BankVersionId], [ShowOnList]) VALUES ((select id from ApiClient where [ExternalId] =N'santanderPis'), (select bankid from BankIdentifier where Value='AGRIPLPR'), (select id from BankVersion where name='v1' and BankId=(select bankid from BankIdentifier where Value='AGRIPLPR')) , 1 )
INSERT [dbo].[ApiClientBank] ([ApiClientId], [BankId], [BankVersionId], [ShowOnList]) VALUES ((select id from ApiClient where [ExternalId] =N'santander'), (select bankid from BankIdentifier where Value='AGRIPLPR'), (select id from BankVersion where name='v1' and BankId=(select bankid from BankIdentifier where Value='AGRIPLPR')) , 1 )
SQL - assigning an existing bank to an API client
the gnb prod case: enabling Credit Agricole and BNP
select * from ApiClient where externalid like '%gnb_prod%'
select * from ApiClientBank where ApiClientId in (select id from ApiClient where externalid like '%gnb_prod%') and bankid in (select bankid from BankIdentifier where [Value] in ('PPABPLPK','AGRIPLPR')) -- 80 bnp, 83 credit
update ApiClientBank set ShowOnList=1 where ApiClientId=14 and BankId in (select bankid from BankIdentifier where [Value] in ('PPABPLPK','AGRIPLPR'))
update ApiClientBank set ShowOnList=1 where ApiClientId=15 and BankId in (select bankid from BankIdentifier where [Value] in ('PPABPLPK','AGRIPLPR'))
SQL - Adding a new bank to apiClientIds, with exclusions
INSERT INTO ApiClientBank(
ApiClientId
,BankId
,BankVersionId
,ShowOnList
)
SELECT
DISTINCT acb.apiclientid
,285 AS BankId --Check bank ID and put here
,MAX(bv.Id) -- Check bank version after update
,1
FROM ApiClientBank acb
JOIN BankVersion bv
ON acb.BankId=bv.BankId
JOIN Psd2Provider pp
ON pp.ID=bv.Psd2ProviderId
JOIN [Bank] b
ON b.Id=bv.BankId
WHERE acb.apiclientid not in (/*apiClientIds that already have this bank assigned (the PK would throw an exception)
or apiClientIds to which the bank should not be added*/)
GROUP BY acb.apiclientid
T-SQL - adding a new apiClientId + a bank list based on an already existing apiClientId
INSERT INTO [ApiClient] (ExternalId) VALUES ('/*name of the new apiClientId*/')
----------------------------------------------------------------------------------
INSERT INTO [ApiClientBank] (ApiClientId,BankId,BankVersionId, ShowOnList)
(SELECT
(SELECT id FROM apiclient WHERE id = (SELECT IDENT_CURRENT('ApiClient'))),
acb.BankId,
acb.BankVersionId,
1
FROM [dbo].[ApiClient] ac
LEFT OUTER JOIN [ApiClientBank] acb
ON ac.id = acb.apiclientid
where acb.ApiClientId = -- e.g. 4, or look it up:
(SELECT id FROM ApiClient WHERE id =
/*the apiClientId whose bank list will be copied to the new apiClientId*/))
SQL - Session ID information based on a given condition (userId and bank.name)
SELECT
b.Name
,b.OfficialName
,apis.*
FROM [FinatApiQA].[dbo].[ApiSession] apis
left outer join bank b
ON b.id = apis.bankid
WHERE b.name like '%mbank%'
and apis.userId = '6d535d27-1242-4d41-b8f7-9eae742cbe76'
SQL - finding and deleting duplicate transactions on the old API
DECLARE @Connector varchar(100) = 'DskConnector';
with xmlnamespaces(
'http://schemas.datacontract.org/2004/07/Psd2Hub.Connectors.Dsk.ResponseModels.Account.Models' as ns,
default 'http://www.w3.org/2001/XMLSchema-instance'
),
UniqueTransactions (Id) as
(select
max(ixemel.id) as Id
from
(Select [psd2t].Id,
[psd2t].Psd2BankAccountId,
[psd2t].[Data].value('(/ns:Transactions/ns:transactionId)[1]', 'varchar(256)') as tid,
[psd2t].[Data].value('(/ns:Transactions/ns:bookingDate)[1]', 'varchar(256)') as bd,
[psd2t].[Data].value('(/ns:Transactions/ns:transactionAmount/ns:currency)[1]', 'varchar(256)') as tac,
[psd2t].[Data].value('(/ns:Transactions/ns:transactionAmount/ns:amount)[1]', 'varchar(256)') as taa,
[psd2t].[Data].value('(/ns:Transactions/ns:creditorAccount/ns:iban)[1]', 'varchar(256)') as ciban,
[psd2t].[Data].value('(/ns:Transactions/ns:creditorAccount/ns:bban)[1]', 'varchar(256)') as cbban,
[psd2t].[Data].value('(/ns:Transactions/ns:debtorAccount/ns:iban)[1]', 'varchar(256)') as diban,
[psd2t].[Data].value('(/ns:Transactions/ns:debtorAccount/ns:bban)[1]', 'varchar(256)') as dbban,
[psd2t].[Data].value('(/ns:Transactions/ns:remittanceInformationUnstructured)[1]', 'varchar(256)') as trid
from[Psd2Transaction] [psd2t]
join [ApiSession] [as] on [psd2t].[LastApiSessionId] = [as].[Id]
join [Bank] [b] on [as].[BankId] = [b].[Id]
join [Psd2Provider] [psd2p] on [b].[ProviderId] = [psd2p].[Id]
join [Psd2Connector] [psd2c] on [psd2p].[Psd2ConnectorId] = [psd2c].[Id]
where [psd2c].[ConnectorName] = @Connector
) as ixemel
group by
ixemel.tid,
ixemel.bd,
ixemel.tac,
ixemel.taa,
ixemel.ciban,
ixemel.cbban,
ixemel.diban,
ixemel.dbban,
ixemel.trid,
ixemel.Psd2BankAccountId )
select pt.Id, pt.TransactionId INTO #tempDuplicatedTransactions FROM UniqueTransactions ut
RIGHT JOIN Psd2Transaction pt ON ut.Id = pt.Id
join [ApiSession] [as] on [pt].[LastApiSessionId] = [as].[Id]
join [Bank] [b] on [as].[BankId] = [b].[Id]
join [Psd2Provider] [psd2p] on [b].[ProviderId] = [psd2p].[Id]
join [Psd2Connector] [psd2c] on [psd2p].[Psd2ConnectorId] = [psd2c].[Id]
where [psd2c].[ConnectorName] = @Connector
AND ut.Id IS NULL
select * from Psd2Transaction where id in (select Id from #tempDuplicatedTransactions) -- check
select * from [Transaction] where id in (select TransactionId from #tempDuplicatedTransactions) -- check
begin transaction;
delete from Consent where Psd2TransactionId in (select Id from #tempDuplicatedTransactions)
delete from Psd2Transaction where id in (select Id from #tempDuplicatedTransactions)
delete from [Transaction] where id in (select TransactionId from #tempDuplicatedTransactions)
--rollback;
commit;
DROP TABLE #tempDuplicatedTransactions
mongo - invalidating multiple consents with a reason
invalidation reason enum: https://banqup.visualstudio.com/banqUP/_git/bankup?path=%2Fservices%2FPsd2Hub.Services.ConsentService%2FPsd2Hub.Services.ConsentService.Repository%2FModels%2FInvalidationReason.cs&_a=contents&version=GBmaster
db.getCollection('Consent').updateMany( {'_id': { '$in': [UUID("xxx"),UUID("xxx")] }}, { $set: {'status':'Invalid','invalidationInfo': { 'description':'some description', 'reason': 'Unspecified', 'invalidated': ISODate() }}})
[curl] - for timezones and more
curl -X GET http://localhost/api/v2/bank/internal/timezones \
--header 'Accept-Language: en-gb' \
--header 'api-clientId: default' \
--header 'Content-Type: application/json' \
--header 'IP: 127.0.0.1' \
--header 'X-Tenant-Id: unifiedpost' \
--header 'X-Product: account-information' \
--header 'Execution-Invoker: Api' \
--header 'Execution-Id: xxx' -v
[cUrl] - triggering updateJob for a specific payment
Set PaymentId to the ID of the payment you want to check
Set execution-Id to your own custom execId, to make searching the logs easier
curl --location --request POST 'http://localhost/api/job/immediate' --header 'Content-Type: application/json' --header 'execution-Id: PaymentUpdatePP140420221' --header 'X-Build-Id: 224' --header 'X-Product: payment-initiation' --header 'X-Tenant-Id: unifiedpost' --data-raw '{"taskName": "updatePaymentStatus","product": "payment-initiation","TenantId": "unifiedpost","isMultitenancyMode": true,"jobArguments": {"PaymentId": "F7DD826A-3EF5-442E-85DF-DA4FBF8AFC50"}}'
SQL - SCA pass-through statistics (UTC of account fetch minus UTC of session start)
SELECT
psd2ba.BankAccountId,
psd2ba.LastApiSessionId,
aps.AccountsFetchedCount,
aps.AccountsProcessedCount,
aps.ExternalUserId,
psd2ba.BankId,
DATEDIFF(second,aps.RequestDate ,psd2ba.ProcessingDate) AS Czas_Procesowanie_SEC
FROM Psd2BankAccount psd2ba with (NOLOCK)
LEFT OUTER JOIN ApiSession aps with (NOLOCK)
ON psd2ba.LastApiSessionId = aps.id
WHERE
aps.sessionsource not in (2,3)
AND aps.accountsprocessedcount <> 0
AND aps.stage not in (6,7)
order by psd2ba.ProcessingDate desc
[rabbit] - listing queues and their bound exchanges
file: rabbit_SNT_wylistowanie_exchange_kolejek.py
sample output
{
"consumers": 0,
"durable": true,
"messages": 1,
"name": "idhub_accounts_queue",
"state": "running",
"vhost": "master_snt-20220419-1",
"bindings": [],
"self_bindings": [
{
"source": "",
"vhost": "master_snt-20220419-1",
"destination": "idhub_accounts_queue",
"destination_type": "queue",
"routing_key": "idhub_accounts_queue",
"arguments": {},
"properties_key": "idhub_accounts_queue"
},
{
"source": "Psd2Hub.Services.PushService.Models.Santander.IdHub:IdHubAccountMessage",
"vhost": "master_snt-20220419-1",
"destination": "idhub_accounts_queue",
"destination_type": "queue",
"routing_key": "",
"arguments": {},
"properties_key": "~"
}
]
}
SQL - duration of the whole session
SELECT top 300 apis.Id as SessionId, apis.BankId, apis.SessionSource, DATEDIFF(s,apis.RequestDate, psd2tr.LastModificationDate ) as Processing_Time_in_sec
FROM ApiSession apis
CROSS APPLY
(
SELECT TOP 1 lastmodificationdate
FROM Psd2Transaction ps
WHERE ps.LastApiSessionId = apis.Id
order by LastModificationDate desc
) psd2tr
where SessionSource in (1,3) and ProcessingCompletedSuccessfully = 1
Mongo - duration of the communication with the bank - plapi and plapiEx
db.getCollection('Log_20220627').aggregate([
{$match:{'machine.entryAssembly':{$in:['Psd2Hub.Connectors.PolishApiExToken, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null', 'Psd2Hub.Connectors.PolishApi, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null']}, 'message':/.*PSD2RestApiCaller.Call.*/i, "executionData.invoker":"Api"}},
{"$group" : {'_id':"$executionData.id", 'start':{$min:'$date'}, 'end':{$max:'$date'}, 'callCount':{$sum: 1}}},
{$project:{'callCount':1,'start':1,'len':{$subtract:['$end', '$start']}}}
])
sql - deleting all data related to a user [151]
DECLARE
@externalUser varchar(100) = 'regression_AISP_ALBPPLPW_2022-06-28T11:59:17.445Z';
declare @userid varchar(100) = (select id from AspNetUsers where ExternalId = @externalUser);
DECLARE @sessionId table (id varchar(100));
INSERT INTO @sessionId select id from ApiSession where UserId = @userid;
DECLARE @bankAccountIds table (id int);
INSERT INTO @bankAccountIds select BankAccountId from Psd2BankAccount where ImportApiSessionId in (select * from @sessionId)
DECLARE @psd2bankaccount table (id varchar(100));
insert into @psd2bankaccount select id from Psd2BankAccount where BankAccountId in (select * from @bankAccountIds )
DECLARE @psd2trx table (id varchar(100));
insert into @psd2trx select id from Psd2Transaction where transactionid in (select id from [Transaction] where BankAccountId in (select * from @bankAccountIds))
delete from ApiBankAccountApiSession where ApiSessionId in (select * from @sessionId)
delete from ApiBankAccount where LastApiSessionId in (select * from @sessionId)
delete from BankAccountIdentifier where BankAccountId in (select * from @bankAccountIds)
delete from BankAccountRelationType where BankAccountId in (select * from @bankAccountIds)
delete from BalanceHistory where BankAccountBalanceId in (select id from BankAccountBalance where BankAccountId in (select * from @bankAccountIds) )
delete from BankAccountBalance where BankAccountId in (select * from @bankAccountIds)
delete from TransactionDetail where transactionid in (select id from [Transaction] where BankAccountId in (select * from @bankAccountIds))
delete from consent where Psd2BankAccountId in (select id from Psd2BankAccount where BankAccountId in (select * from @bankAccountIds )) or Psd2TransactionId in (select id from Psd2Transaction where transactionid in (select id from [Transaction] where BankAccountId in (select * from @bankAccountIds)))
delete from Psd2Transaction where id in (select * from @psd2trx)
delete from Psd2BankAccount where BankAccountId in (select * from @bankAccountIds )
delete from [Transaction] where BankAccountId in (select * from @bankAccountIds)
delete from BankAccountIdentifier where BankAccountId in (select * from @bankAccountIds)
delete from BankAccountSync where BankAccountId in (select * from @bankAccountIds)
delete from BankAccount where id in (select * from @bankAccountIds)
delete from BankAccountSync where Psd2SessionId in (select * from @sessionId)
delete from Psd2Session where PsuCredentialsId in (select id from PsuCredentials where UserId = @userid)
delete from ApiSession where UserId = @userid
delete from PsuCredentials where UserId = @userid
delete from UserApiClient where UserExternalId = @externalUser
delete from UserCompany where UserId = @userid
delete from Company where ExternalId = @externalUser
delete from UserIdentifier where userid = @userid
delete from AspNetUsers where ExternalId = @externalUser
healthchecks - checking service liveness (the /ready endpoint)
hits the /ready healthcheck endpoint of every service in the stack except those excluded by the grep -vE below
for C in `LANG=C docker-compose ps --services| grep -vE "redis|traefik|portainer|demo_app|autoheal|consul|redis-sentinel|redis-ha-node" |awk '{print $1}'`; do
echo $C
docker-compose exec -T $C bash -c 'exec 3<>/dev/tcp/127.0.0.1/80; echo -e "GET /ready HTTP/1.0\r\n\r\n" >&3 ; cat <&3'
echo ''
done
[mongo] listing all errors and warnings grouped by executionData.id
var cursor=db.getCollection('KOLEKCJA').aggregate(
[
{
$match: { $or: [
{
level: /Warn|Error/i,
'exception.message': {$nin: [/Invalid JSON string/i]},
message: {$nin: [/to client endpoint. Missing url in configuration./i]}
}
]
}
},
{
$group: {
_id: { executionId: '$executionData.id' },
name: { $min: "$message" },
status: { $max: "$details.headers.Actual-Response-Status" },
min: { $min: "$date" },
max: { $max: "$date" },
messages: {$push: { message: '$message', details: "$details", exception: "$exception" } },
elementCount: { $sum: 1 }
}
},
{
$project: {
executionId: '$_id.executionId',
status: '$status',
name: '$name',
messages: '$messages',
numberOfDocs: '$elementCount',
when: '$min'
}
},
{
$match: {
numberOfDocs : { $gt: 1},
status: '500'
}
},
{
$sort: { howLong: -1 }
}
],
{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
[mongo] tpp prod - counting per tenant how many "Unknown error 258" errors occur
var cursor=db.getCollection('KOLEKCJA').aggregate(
[
{
$match: { $or: [
{
level: /Warn|Error/i,
'exception.innerException.message':/Unknown error 258/i
}
]
}
},
{
$group: {
_id: { tenant: '$executionData.tenantId', executionId: '$executionData.id' },
min: { $min: "$date" },
tenant: { $min: "$executionData.tenantId" }
}
},
{
$group: {
_id: '$_id.tenant',
count: { "$sum": 1 }
}
},
{
$project: {
tenant: '$_id.tenant',
count: '$count'
}
}
],
{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
[sql] active consents in a given period
select count(*) liczba_sesji, ConsentId, ass.externaluserid, ass.userid, convert(varchar(7), ass.RequestDate, 120) data_
from Psd2Session ps left join ApiSession ass on ps.id = ass.Id
where convert(varchar(10), ass.RequestDate, 120) >= '2022-01-01'
and convert(varchar(10), ass.RequestDate, 120) <= '2022-08-31'
and ass.SessionSource = 2
group by convert(varchar(7), ass.RequestDate, 120) , ps.ConsentId, ass.externaluserid, ass.userid
order by convert(varchar(7), ass.RequestDate, 120), count(*) desc
[sql] how many active consents in how many banks per user
select
case
when count (distinct bankid) = 1 then 1
when count (distinct bankid) = 2 then 1.5
when count (distinct bankid) = 3 then 1.75
when count (distinct bankid) >= 4 then 2
end as UUID,
UserId, ExternalUserId, convert(varchar(7), RequestDate, 120) month from ApiSession
where convert(varchar(10), RequestDate, 120) >= '2022-01-01' and convert(varchar(10), RequestDate, 120) <= getdate()
group by UserId, ExternalUserId, convert(varchar(7), RequestDate, 120)
[sql] statistics: how many active consents in how many banks per user (for the contract) - santander
-- option 1
select
case
when count (distinct bankid) = 1 then 1
when count (distinct bankid) = 2 then 1.5
when count (distinct bankid) = 3 then 1.75
when count (distinct bankid) >= 4 then 2
end as UUID,
UserId, ExternalUserId, convert(varchar(7), RequestDate, 120) month from ApiSession
where convert(varchar(10), RequestDate, 120) >= '2022-08-01' and convert(varchar(10), RequestDate, 120) <= getdate()
and (SessionSource in (2,3) or (SessionSource = 1 and stage in (5, 3)))
group by UserId, ExternalUserId, convert(varchar(7), RequestDate, 120)
-- option 2
select
case
when count (distinct bankid) = 1 then 1
when count (distinct bankid) = 2 then 1.5
when count (distinct bankid) = 3 then 1.75
when count (distinct bankid) >= 4 then 2
end as UUID,
UserId, ExternalUserId, convert(varchar(7), RequestDate, 120) month
from ApiSession
where RequestDate between '2022-08-31T00:00:00.0000000' and getdate()
and (SessionSource in (2,3) or (SessionSource = 1 and stage in (5, 3)))
group by UserId, ExternalUserId, convert(varchar(7), RequestDate, 120)
[sql] statistics: how many active consents in how many banks per user (for the contract) - inFakt (TPP)
select count(*) liczba_sesji, ConsentId, convert(varchar(7), ass.RequestDate, 120) data_
from Psd2Session ps left join ApiSession ass on ps.id = ass.Id
where convert(varchar(7), ass.RequestDate, 120) = '2022-08'
and ass.SessionSource = 2
group by convert(varchar(7), ass.RequestDate, 120) , ps.ConsentId
order by convert(varchar(7), ass.RequestDate, 120), count(*) desc
[mongo] statistics of 500 codes on get bankaccount
var cursor=db.getCollection('Log_20220921').aggregate(
[
{
$match: { $or: [
{
level: /Warn|Error/i,
'exception.message': {$nin: [/Invalid JSON string/i]},
message: {$nin: [/to client endpoint. Missing url in configuration./i]}
}
]
}
},
{
$group: {
_id: { executionId: '$executionData.id' },
name: { $min: "$message" },
status: { $max: "$details.headers.Actual-Response-Status" },
min: { $min: "$date" },
max: { $max: "$date" },
messages: {$push: { message: '$message', details: "$details", exception: "$exception" } },
elementCount: { $sum: 1 }
}
},
{
$project: {
executionId: '$_id.executionId',
status: '$status',
name: '$name',
messages: '$messages',
numberOfDocs: '$elementCount',
when: '$min',
howLong: { $divide: [{ $subtract: ['$max', '$min'] }, 1000] } // seconds between first and last log; used by the $sort below
}
},
{
$match: {
numberOfDocs : { $gt: 1},
status: '500'
}
},
{
$sort: { howLong: -1 }
}
],
{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
[mongo] [fin] [transaction streaming] grouping streamed transactions to find duplicates
runs on processed documents from the PushService.StreamingLog collection (howto here) imported into our temp mongo database
the result goes straight to CSV; $out would move the results to a separate, named collection, but it does not work when $unwind is used, because it trips over duplicate _id values (a hedged workaround sketch follows the snippet below)
// helper: dumps an array of documents as semicolon-separated CSV to the shell output
Array.prototype.toCSV = function toMediocreCSV () {
const results = this;
// collect the union of keys across all documents to build the header row
const headers = {};
results.forEach((result) =>
Object.keys(result).forEach((key) => headers[key] = true)
)
const keys = Object.keys(headers);
// print the header row, then one line per document (missing values become empty strings)
print(keys.join(";"));
results.forEach((result) =>
print(keys.map((key) => result[key] != null ? result[key] : '')
.join(";"))
)
}
db.getCollection('duplikaty_streaming').aggregate(
[
{
$match: { $or: [
{
"message" : "TransactionMessage"
}
]
}
},
{
$group: {
_id: { trxId: '$data.id',sessionId: '$sessionId' },
min: { $min: "$meta.timestamp" },
count: { "$sum": 1 }
}
},
{
$group: {
_id: '$_id.sessionId',
trxId: {$push: '$_id.trxId'},
count: { "$sum": 1 }
}
},
{ "$unwind": "$trxId" },
//{ $out: "duplikaty_streaming_obrobione" }
],
{ allowDiskUse: true }
).toArray().toCSV()
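A hedged workaround sketch (an assumption, not part of the original note): because $unwind leaves several documents sharing the same _id, dropping _id in a final $project before $out should let MongoDB generate fresh ids, so the result could still be written to a named collection instead of going through toCSV. Collection and field names mirror the snippet above.
// sketch only - same grouping as above, but _id is dropped before $out so the
// duplicate-_id problem caused by $unwind should not occur
db.getCollection('duplikaty_streaming').aggregate(
[
{ $match: { "message": "TransactionMessage" } },
{ $group: { _id: { trxId: '$data.id', sessionId: '$sessionId' }, count: { $sum: 1 } } },
{ $group: { _id: '$_id.sessionId', trxId: { $push: '$_id.trxId' }, count: { $sum: 1 } } },
{ $unwind: "$trxId" },
// drop _id so the documents written by $out get new ObjectIds
{ $project: { _id: 0, sessionId: '$_id', trxId: '$trxId', count: '$count' } },
{ $out: "duplikaty_streaming_obrobione" }
],
{ allowDiskUse: true }
)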
[mongo] error statistics per user (https://billtobox.zendesk.com/agent/tickets/99843)
var cursor=db.getCollection('99843_staty').aggregate(
[
{
$match: { $or: [
{
level:/Warn|Error/i
}
]
}
},
{
$group: {
_id: { userId: '$executionData.context.userId', },
errorMessage: {$push:'$exception'},
}
},
{
$group: {
_id: '$_id.userId',
errorMessage: {$push: '$errorMessage'},
}
},
{ $unwind: {path: "$errorMessage", preserveNullAndEmptyArrays: true} },
//{ $out: "99843_staty_obrobione" }
],
{ allowDiskUse: true }
);
while(cursor.hasNext())
{
print(cursor.next());
}
[mongo] [master] manually check how long the migration moving documents to other collections takes - non-optimal
moving documents with the isDeleted: true flag from the Consent collection to the DeletedConsent collection - 1_0_5_MoveSoftDeletedConsentsToDeleted.cs, and moving the documents whose consentId was found in the previous query and moved (isDeleted: true) from the ConsentHistoryItem collection to DeletedHistoryItem - 1_0_7_MoveDeletedConsentsHistory.cs
0. If working on an on-demand / some test environment on the master version, the isDeleted flag has to be set by hand on a few test consents rather than deleting them through the DELETE consent endpoint, because consents deleted through the endpoint are moved automatically (a hedged sketch below)
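A minimal sketch of step 0 in the mongo shell (hedged; the empty UUID is a placeholder for a real test consent id):
// sketch only - soft-delete a test consent by hand instead of calling the DELETE consent endpoint
db.getCollection('Consent').updateOne(
{ "_id": UUID("") }, // placeholder - put the test consent id here
{ $set: { isDeleted: true } }
)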
1.
Move the documents with the isDeleted: true flag to a temporary collection;
db.getCollection('Consent').aggregate(
[
{
$match: { $or: [
{
isDeleted: true}]}},
{
$group: {
_id: { created: '$created' },
consentId: {$min: '$_id'}
}
},
{ $out: "Consent_isDeletedTrue_ids" }
],
{ allowDiskUse: true }
)
2.
Move the documents with the isDeleted: true flag from the Consent collection to the new collection;
var documentsToMove = db.Consent.find({isDeleted: true});
documentsToMove.forEach(function(doc) {
db.DeletedConsent.insert(doc);
db.Consent.remove(doc);
});
3.
Move the documents from the ConsentHistoryItem collection (for the consent ids returned in step 1) to the new collection;
var documentsToMove = db.getCollection('ConsentHistoryItem').aggregate(
[
{
$lookup: {
from: 'Consent_isDeletedTrue_ids',
let:{g:"$additionalInformation.consentId"},
pipeline:[
{$match:{$expr:{$eq:["$$g","$consentId"]}}
}],
as: 'documents'
}
},
{$match: {'documents.consentId' : {$exists: true}}},
{$unwind: '$documents' },
{$project: {'documents' :0}}
],
{ allowDiskUse: true }
);
documentsToMove.forEach(function(doc) {
db.DeletedHistoryItem.insert(doc);
db.ConsentHistoryItem.remove(doc);
});
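None of the steps above measures time by itself; a minimal sketch of timing step 2 by hand (hedged; wrap any of the other steps the same way):
// sketch only - wall-clock timing of the step 2 move
var start = new Date();
db.Consent.find({ isDeleted: true }).forEach(function (doc) {
db.DeletedConsent.insert(doc);
db.Consent.remove(doc);
});
print("step 2 took " + (new Date() - start) / 1000 + " s");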
[mongo] [master] manually check how long the migration moving documents to other collections takes - optimal
moving documents with the isDeleted: true flag from the Consent collection to the DeletedConsent collection - 1_0_5_MoveSoftDeletedConsentsToDeleted.cs, and moving the documents whose consentId was found in the previous query and moved (isDeleted: true) from the ConsentHistoryItem collection to DeletedHistoryItem - 1_0_7_MoveDeletedConsentsHistory.cs
I - move consents with the isDeleted: true flag to the new DeletedConsent collection
var consentCollectionName = "Consent";
var deletedConsentCollectionName = "DeletedConsent";
var inserts = [];
var deletes = [];
var all = db.getCollection(consentCollectionName).find({'isDeleted':true}).forEach(function(doc)
{
inserts.push(
{
insertOne:{
document:doc
}
});
deletes.push({
deleteOne:{
filter: {"_id":doc["_id"]}
}
});
});
db.getCollection(deletedConsentCollectionName).bulkWrite(inserts);
db.getCollection(consentCollectionName).bulkWrite(deletes);
II - move the history of the consents found in I to a new collection,
ConsentHistoryItemDeleted - without splitting into batches
var deletedConsent = db.getCollection("DeletedConsent");
var historyItem = db.getCollection("ConsentHistoryItem");
var deletedHistoryItem = db.getCollection("ConsentHistoryItemDeleted");
var consentIds = [];
var consentsToMoveHistory = deletedConsent.find({}).forEach(function(doc)
{
consentIds.push(doc._id);
});
// print(consentIds);
var inserts = [];
historyItem.find({'additionalInformation.consentId':{$in:consentIds}}).forEach(function(doc)
{
inserts.push({
insertOne:{
document:doc
}});
});
// print(inserts);
deletedHistoryItem.bulkWrite(inserts);
historyItem.deleteMany({'additionalInformation.consentId':{$in:consentIds}});
II.i - move the history of the consents found in I to a new collection,
ConsentHistoryItemDeleted - split into batches of 10k documents;
var deletedConsent = db.getCollection("DeletedConsent");
var historyItem = db.getCollection("ConsentHistoryItem");
var deletedHistoryItem = db.getCollection("ConsentHistoryItemDeleted");
var batchSize = 10000;
var consentIds = [];
var consentsToMoveHistory = deletedConsent.find({}).forEach(function(doc)
{
consentIds.push(doc._id);
});
var inserts = [];
var counter = 0;
historyItem.find({'additionalInformation.consentId':{$in:consentIds}}).forEach(function(doc)
{
inserts.push({
insertOne:{
document:doc
}});
counter++;
if(counter % batchSize == 0)
{
deletedHistoryItem.bulkWrite(inserts);
inserts = [];
}
});
if(inserts.length > 0)
{
deletedHistoryItem.bulkWrite(inserts);
}
historyItem.deleteMany({'additionalInformation.consentId':{$in:consentIds}});
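A hedged sanity check after either variant (not part of the original procedure) - the target collections should hold the moved documents and no matching history items should remain:
// sketch only - counts after the move; consentIds is the array built in the scripts above
print("DeletedConsent: " + db.getCollection("DeletedConsent").count());
print("ConsentHistoryItemDeleted: " + db.getCollection("ConsentHistoryItemDeleted").count());
print("ConsentHistoryItem still matching moved consents (expected 0): " + db.getCollection("ConsentHistoryItem").count({ 'additionalInformation.consentId': { $in: consentIds } }));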
[mongo] [master] check how long a session / sessions take
var cursor=db.getCollection('SessionHistoryItem').aggregate(
[
{
'$match': { 'additionalInformation.sessionId':{$in:[UUID("sessionId"),UUID("sessionId")]} }
},
{
'$group': {
'_id': { 'sessionId': '$additionalInformation.sessionId' },
maxTimestamp: { $max: '$timestamp' },
minTimestamp: { $min: '$timestamp' }
}
},
{
$project: {
_id:0,sessionId:'$_id.sessionId', howLong:{ $divide: [{$subtract: [ '$maxTimestamp', '$minTimestamp' ]},1000]},
sessionEnded:'$maxTimestamp',sessionStarted:'$minTimestamp'
}
},
{
$sort: { howLong: -1 }
}
]);
while(cursor.hasNext())
{
print(cursor.next());
}
[mongo] [4.4.2 and above] list of all consents: whether active, which bank, when created and when revoked [Confirmed consents]
based on https://billtobox.zendesk.com/agent/tickets/127106 - list of all consents, whether each is active, which bank it is for, when it was created and when it was revoked - Confirmed consents only
db.getCollection("Consent").aggregate(
[
{
"$unionWith" : {
coll: "DeletedConsent"
}
},
{
$match: {$or:[{status:/confirmed/i},{'consent.status':/confirmed/i}]}
},
{
$group:{
_id:{id:'$_id'},
notDeletedUserId:{$max:'$userId'},
ConsentId:{$push:'$_id'},
notDeletedStatus: {$max:'$status'},
notDeletedBankSwiftCode: {$max:'$bankSwiftCode'},
notDeletedConfirmed: {$max:'$confirmed'},
notDeletedInvalidated: {$max:'$invalidationInfo.invalidated'},
notDeletedScaCompleted: {$max:'$sca.completedTimestamp'},
deletedConsentUserId: {$max:'$consent.userId'},
deletedStatus: {$max:'$consent.status'},
deletedBankSwiftCode: {$max:'$consent.bankSwiftCode'},
deletedConfirmed: {$max:'$consent.confirmed'},
deletedInvalidated: {$max:'$consent.invalidationInfo.invalidated'},
deletedScaCompleted: {$max:'$consent.sca.completedTimestamp'},
deletedDate: {$max:'$deletedDate'}
}},
{
$unwind: '$ConsentId'
},
{$project:{
_id:0,
notDeletedUserId:'$notDeletedUserId',
ConsentId:'$ConsentId',
notDeletedStatus: '$notDeletedStatus',
notDeletedBankSwiftCode: '$notDeletedBankSwiftCode',
notDeletedConfirmed: '$notDeletedConfirmed',
notDeletedInvalidated: '$notDeletedInvalidated',
notDeletedScaCompleted: '$notDeletedScaCompleted',
deletedConsentUserId: '$deletedConsentUserId',
deletedStatus: '$deletedStatus',
deletedBankSwiftCode: '$deletedBankSwiftCode',
deletedConfirmed: '$deletedConfirmed',
deletedInvalidated: '$deletedInvalidated',
deletedScaCompleted: '$deletedScaCompleted',
deletedDate: '$deletedDate'
}}
],
{
"allowDiskUse" : true
});
[mongo] [below 4.4.2] list of all consents: whether active, which bank, when created and when revoked [Confirmed consents]
the same as above, but for older Mongo versions (< 4.4.2); it has to be run against 2 collections, Consent and DeletedConsent
based on https://billtobox.zendesk.com/agent/tickets/127106 - list of all consents, whether each is active, which bank it is for, when it was created and when it was revoked - Confirmed consents only
to run on the Consent collection:
var cursor=db.getCollection("Consent").aggregate(
[
{
$match: {$or:[{status:/confirmed/i},{'consent.status':/confirmed/i}]}
},
{
$group:{
_id:{id:'$_id'},
ConsentId:{$push:'$_id'},
notDeletedUserId:{$max:'$userId'},
notDeletedStatus: {$max:'$status'},
notDeletedBankSwiftCode: {$max:'$bankSwiftCode'},
notDeletedConfirmed: {$max:'$confirmed'},
notDeletedInvalidated: {$max:'$invalidationInfo.invalidated'},
notDeletedScaCompleted: {$max:'$sca.completedTimestamp'},
}},
{
"$unwind" : '$ConsentId'
},
{$project:{
_id:0,
ConsentId:'$ConsentId',
notDeletedUserId:'$notDeletedUserId',
notDeletedStatus: '$notDeletedStatus',
notDeletedBankSwiftCode: '$notDeletedBankSwiftCode',
notDeletedConfirmed: '$notDeletedConfirmed',
notDeletedInvalidated: '$notDeletedInvalidated',
notDeletedScaCompleted: '$notDeletedScaCompleted',
}}
],
{
"allowDiskUse" : true
}
);
while(cursor.hasNext())
{
print(cursor.next());
}
to run on the DeletedConsent collection:
var cursor=db.getCollection("DeletedConsent").aggregate(
[
{
$match: {$or:[{status:/confirmed/i},{'consent.status':/confirmed/i}]}
},
{
$group:{
_id:{id:'$_id'},
deletedConsentId:{$push:'$_id'},
deletedConsentUserId: {$max:'$consent.userId'},
deletedStatus: {$max:'$consent.status'},
deletedBankSwiftCode: {$max:'$consent.bankSwiftCode'},
deletedConfirmed: {$max:'$consent.confirmed'},
deletedInvalidated: {$max:'$consent.invalidationInfo.invalidated'},
deletedScaCompleted: {$max:'$consent.sca.completedTimestamp'},
deletedDate: {$max:'$deletedDate'}
}},
{
"$unwind" : '$deletedConsentId'
},
{$project:{
_id:0,
deletedConsentId: '$deletedConsentId',
deletedConsentUserId: '$deletedConsentUserId',
deletedStatus: '$deletedStatus',
deletedBankSwiftCode: '$deletedBankSwiftCode',
deletedConfirmed: '$deletedConfirmed',
deletedInvalidated: '$deletedInvalidated',
deletedScaCompleted: '$deletedScaCompleted',
deletedDate: '$deletedDate'
}}
],
{
"allowDiskUse" : true
}
);
while(cursor.hasNext())
{
print(cursor.next());
}
[mongo] authorize statistics together with ASPSP response times for all calls
file: santander_authorize_statistics.txt
var cursor=db.getCollection('KOLEKCJA').aggregate(
[{
$match: { $or: [
{
message: /api\/v2\/bank\/.*\/authorize/i,
exception: { $exists: false },
logger: 'Next',
'details.method': 'POST'
},
{
message: /api\/v2\/bank\/.*\/authorize/i,
exception: { $exists: false },
logger: 'LogResponse'}
,{message:/psd2r/i}
]}},
{
$group: {
_id: { executionId: '$executionData.id' },
name: { $min: "$message" },
containerId: { $min: {$cond: [{$eq:['$logger','LogResponse']},'$machine.machineName',null]}},
status: { $max: "$details.headers.Actual-Response-Status" },
min: { $min: "$date" },
max: { $max: "$date" },
elementCount: { $sum: 1 },
aspspRequestDuration: {$push: "$details.requestDuration"},
logger: {$push: "$logger"},
minDate1: {$min:{
$cond: [{$eq:['$logger','Next']},'$date',null]
}},
minDate2:{$min:{
$cond: [{$eq:['$logger','LogResponse']},'$date',null]
}}
}},
{$sort: {min :1}},
{
$project: {
_id:0,
executionId: '$_id.executionId',
containerId: '$containerId',
status: '$status',
name: '$name',
aspspRequestDuration: '$aspspRequestDuration',
numberOfDocs: '$elementCount',
when: '$min',
howLong: { $divide: [ {$subtract: [ "$minDate2", "$minDate1" ]}, 1000]}
}},
{
$match: {
numberOfDocs : { $gt: 1},
//status: '200'
}}],
{ allowDiskUse: true }
);
while(cursor.hasNext())
{print(cursor.next());}
[curl] get public certificate
check whether there is connectivity from the connector to the public part of the certificate
run it from the specific connector and call the x5u part of the certificate
curl --insecure -vvI https://www.example.com 2>&1 | awk 'BEGIN { cert=0 } /^\* SSL connection/ { cert=1 } /^\*/ { if (cert) print }'
[cUrl] - call the syncAllActiveOldUsersUsingActiveConsents job to refresh accounts
curl --location --request POST 'http://localhost/api/job/immediate' --header 'Content-Type: application/json' --header 'execution-Id: testRefresh' --header 'X-Build-Id: 224' --data-raw '{"taskName": "syncAllActiveOldUsersUsingActiveConsents","jobArguments": {"batchSize": "10", "batchInterval": "1"}}'
[mongo] [new api] - session report grouped by id=sessionId
var cursor=db.getCollection('Session').aggregate([
{
$lookup: {
from: 'SessionHistoryItem',
localField: '_id',
foreignField: 'additionalInformation.sessionId',
as: 'mergedData'
}
},
{
$unwind: '$mergedData'
},
{
$match: {
'mergedData.changeType':'UpdateSessionStageStatus',
'mergedData.additionalInformation.description':{$ne:null},
'connectorName':/polishapi/i
}
},
{
$group: {
_id:{sessionId:'$_id'},
authorizationModel: {$max:'$authorizationModel'},
bankSwiftCode:{$max:'$bankSwiftCode'},
consentId: {$max:'$consentId'},
sessionContext: {$max:'$context'},
sessionStartedByUser: {$max:'$isDirectPsu'},
source:{$max:'$source'},
sessionCreationDate: {$max:'$created'},
sessionStages: {$push:'$mergedData.additionalInformation.description'},
event: {$max:'$mergedData.changeType'},
stageStatus:{$max:'$stage.status'},
stageType:{$max:'$stage.type'}
}
},
{
$project: {
_id:0,
sessionId: '$_id.sessionId',
sessionCreationDate:'$sessionCreationDate',
bankAccounts:'$authorizationModel',
bankSwiftCode:'$bankSwiftCode',
consentId:'$consentId',
sessionContext:'$sessionContext',
sessionStartedByUser:'$sessionStartedByUser',
sessionSource:'$source',
sessionStages:'$sessionStages',
event:'$event',
stageType:'$stageType',
stageStatus: {
$switch: {
branches: [
{ case: { $eq: ['$stageStatus', 0] }, then: 'Unknown' },
{ case: { $eq: ['$stageStatus', 1] }, then: 'InProgress' },
{ case: { $eq: ['$stageStatus', 2] }, then: 'Finished' },
{ case: { $eq: ['$stageStatus', 3] }, then: 'Failed' }
],
default:'Unexpected stage status, see logs for further investigation'
}
},
}
}
],
{
"allowDiskUse" : true
});
while(cursor.hasNext())
{
print(cursor.next());
}
[mongo] - list indexes in all collections (run in the shell directly on the database)
db.getCollectionNames().forEach(function(collection) {
var indexes = db.getCollection(collection).getIndexes();
print("Indexes for " + collection + ":");
printjson(indexes);
});
[mongo] - list indexes in all collections on the database + show their usage and size
RUN IN THE SHELL DIRECTLY ON THE DATABASE
db = db.getSiblingDB("NAZWA_BAZY");
const collections = db.getCollectionNames();
const result = [];
for (const collection of collections) {
const sizes = db.getCollection(collection).stats().indexSizes;
const indexStats = db.getCollection(collection).aggregate([{ "$indexStats": {} }]).toArray();
for (const index of indexStats) {
index.collection = collection;
index.size = sizes[index.name];
result.push(index)
}
}
printjson(result)
[sql] BankAccount counts broken down by the isActive column
select
count(*) as total,
Sum(iif(isactive in (1), 1, 0)) as active,
Sum(iif(isactive in (0), 1, 0)) as notActive
from
dbo.BankAccount
[mongo] breakdown of something by month and year of a date, with sorting
var cursor=db.getCollection('Consent').aggregate(
[{
$match: { confirmed:{$gte:ISODate("2024-01-01T00:00:00.000Z"),
$lt:ISODate("2025-05-19T10:00:00.000Z")},"product" : "identity-verification",invalidationInfo:null}
},
{
$group: {
_id: { month: {$month :"$confirmed" }, year: {$year :"$confirmed" }},
count: {$sum:1}
}},
{
$project:{
_id:0,
month: '$_id.month',
year: '$_id.year',
count: '$count'
}
},
{
$sort: { year: 1, month: 1 }
}
],
{ allowDiskUse: true }
);
while(cursor.hasNext())
{print(cursor.next());}