Merge tag '8.52.0' of github.com:prebid/Prebid.js into UOE-10602
pm-azhar-mulla authored and pm-azhar-mulla committed Jun 17, 2024
2 parents 9607cc2 + 0c0bc0e commit 4fc7f88
Showing 94 changed files with 3,384 additions and 360 deletions.
124 changes: 124 additions & 0 deletions .github/workflows/jscpd.yml
@@ -0,0 +1,124 @@
name: Check for Duplicated Code

on:
  pull_request_target:
    branches:
      - master

jobs:
  check-duplication:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Fetch all history for all branches
          ref: ${{ github.event.pull_request.head.sha }}

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install dependencies
        run: |
          npm install -g jscpd diff-so-fancy
      - name: Create jscpd config file
        run: |
          echo '{
            "threshold": 20,
            "minTokens": 50,
            "reporters": [
              "json"
            ],
            "output": "./",
            "pattern": "**/*.js",
            "ignore": "**/*spec.js"
          }' > .jscpd.json
      - name: Run jscpd on entire codebase
        run: jscpd

      - name: Fetch base and target branches
        run: |
          git fetch origin +refs/heads/${{ github.event.pull_request.base.ref }}:refs/remotes/origin/${{ github.event.pull_request.base.ref }}
          git fetch origin +refs/pull/${{ github.event.pull_request.number }}/merge:refs/remotes/pull/${{ github.event.pull_request.number }}/merge
      - name: Get the diff
        run: git diff --name-only origin/${{ github.event.pull_request.base.ref }}...refs/remotes/pull/${{ github.event.pull_request.number }}/merge > changed_files.txt

      - name: List generated files (debug)
        run: ls -l

      - name: Upload unfiltered jscpd report
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: unfiltered-jscpd-report
          path: ./jscpd-report.json

      - name: Filter jscpd report for changed files
        run: |
          if [ ! -f ./jscpd-report.json ]; then
            echo "jscpd-report.json not found"
            exit 1
          fi
          echo "Filtering jscpd report for changed files..."
          CHANGED_FILES=$(jq -R -s -c 'split("\n")[:-1]' changed_files.txt)
          echo "Changed files: $CHANGED_FILES"
          jq --argjson changed_files "$CHANGED_FILES" '
            .duplicates | map(select(
              (.firstFile?.name as $fname | $changed_files | any(. == $fname)) or
              (.secondFile?.name as $sname | $changed_files | any(. == $sname))
            ))
          ' ./jscpd-report.json > filtered-jscpd-report.json
          cat filtered-jscpd-report.json
      - name: Check if filtered jscpd report exists
        id: check_filtered_report
        run: |
          if [ $(wc -l < ./filtered-jscpd-report.json) -gt 1 ]; then
            echo "filtered_report_exists=true" >> $GITHUB_ENV
          else
            echo "filtered_report_exists=false" >> $GITHUB_ENV
          fi
      - name: Upload filtered jscpd report
        if: env.filtered_report_exists == 'true'
        uses: actions/upload-artifact@v4
        with:
          name: filtered-jscpd-report
          path: ./filtered-jscpd-report.json

      - name: Post GitHub comment
        if: env.filtered_report_exists == 'true'
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const filteredReport = JSON.parse(fs.readFileSync('filtered-jscpd-report.json', 'utf8'));
            let comment = "Whoa there, partner! 🌵🤠 We wrangled some duplicated code in your PR:\n\n";
            function link(dup) {
              return `https://github.com/${{ github.event.repository.full_name }}/blob/${{ github.event.pull_request.head.sha }}/${dup.name}#L${dup.start}-L${dup.end - 1}`
            }
            filteredReport.forEach(duplication => {
              const firstFile = duplication.firstFile;
              const secondFile = duplication.secondFile;
              const lines = duplication.lines;
              comment += `- [\`${firstFile.name}\`](${link(firstFile)}) has ${lines} duplicated lines with [\`${secondFile.name}\`](${link(secondFile)})\n`;
            });
            comment += "\nReducing code duplication by importing common functions from a library not only makes our code cleaner but also easier to maintain. Please move the common code from both files into a library and import it in each. Keep up the great work! 🚀";
            github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              body: comment
            });
      - name: Fail if duplications are found
        if: env.filtered_report_exists == 'true'
        run: |
          echo "Duplications found, failing the check."
          exit 1
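
For orientation, the jq filter and the comment script above rely on a handful of fields from the jscpd JSON report. A rough sketch of that shape, with placeholder file names (the real report contains additional statistics):

// Assumed structure of ./jscpd-report.json, inferred from the fields read above.
const exampleJscpdReport = {
  duplicates: [
    {
      lines: 37, // duplicated line count reported for this pair
      firstFile: { name: 'modules/exampleBidAdapter.js', start: 10, end: 47 },
      secondFile: { name: 'modules/anotherBidAdapter.js', start: 120, end: 157 }
    }
  ]
};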
43 changes: 25 additions & 18 deletions gulpfile.js
@@ -147,6 +147,17 @@ function makeVerbose(config = webpackConfig) {
});
}

function prebidSource(webpackCfg) {
var externalModules = helpers.getArgModules();

const analyticsSources = helpers.getAnalyticsSources();
const moduleSources = helpers.getModulePaths(externalModules);

return gulp.src([].concat(moduleSources, analyticsSources, 'src/prebid.js'))
.pipe(helpers.nameModules(externalModules))
.pipe(webpackStream(webpackCfg, webpack));
}

function makeDevpackPkg(config = webpackConfig) {
return function() {
var cloned = _.cloneDeep(config);
@@ -163,14 +174,7 @@ function makeDevpackPkg(config = webpackConfig) {
.filter((use) => use.loader === 'babel-loader')
.forEach((use) => use.options = Object.assign({}, use.options, babelConfig));

var externalModules = helpers.getArgModules();

const analyticsSources = helpers.getAnalyticsSources();
const moduleSources = helpers.getModulePaths(externalModules);

return gulp.src([].concat(moduleSources, analyticsSources, 'src/prebid.js'))
.pipe(helpers.nameModules(externalModules))
.pipe(webpackStream(cloned, webpack))
return prebidSource(cloned)
.pipe(gulp.dest('build/dev'))
.pipe(connect.reload());
}
@@ -183,14 +187,7 @@ function makeWebpackPkg(config = webpackConfig) {
}

return function buildBundle() {
var externalModules = helpers.getArgModules();

const analyticsSources = helpers.getAnalyticsSources();
const moduleSources = helpers.getModulePaths(externalModules);

return gulp.src([].concat(moduleSources, analyticsSources, 'src/prebid.js'))
.pipe(helpers.nameModules(externalModules))
.pipe(webpackStream(cloned, webpack))
return prebidSource(cloned)
.pipe(gulp.dest('build/dist'));
}
}
@@ -413,7 +410,9 @@ function runKarma(options, done) {
// the karma server appears to leak memory; starting it multiple times in a row will run out of heap
// here we run it in a separate process to bypass the problem
options = Object.assign({browsers: helpers.parseBrowserArgs(argv)}, options)
const child = fork('./karmaRunner.js');
const child = fork('./karmaRunner.js', null, {
env: Object.assign({}, options.env, process.env)
});
child.on('exit', (exitCode) => {
if (exitCode) {
done(new Error('Karma tests failed with exit code ' + exitCode));
@@ -426,7 +425,15 @@

// If --file "<path-to-test-file>" is given, the task will only run tests in the specified file.
function testCoverage(done) {
runKarma({coverage: true, browserstack: false, watch: false, file: argv.file}, done);
runKarma({
coverage: true,
browserstack: false,
watch: false,
file: argv.file,
env: {
NODE_OPTIONS: '--max-old-space-size=8096'
}
}, done);
}

function coveralls() { // 2nd arg is a dependency: 'test' must be finished
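One note on the runKarma change above: Object.assign applies its sources left to right, so process.env is merged last and a NODE_OPTIONS value already exported in the shell takes precedence over the default that testCoverage passes in. A minimal illustration (not part of the diff):

const taskEnv = { NODE_OPTIONS: '--max-old-space-size=8096' };
// process.env wins on conflicts because it is assigned last
const childEnv = Object.assign({}, taskEnv, process.env);
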
12 changes: 12 additions & 0 deletions libraries/pbsExtensions/processors/aliases.js
@@ -1,4 +1,5 @@
import adapterManager from '../../../src/adapterManager.js';
import {config} from '../../../src/config.js';
import {deepSetValue} from '../../../src/utils.js';

export function setRequestExtPrebidAliases(ortbRequest, bidderRequest, context, {am = adapterManager} = {}) {
@@ -7,11 +8,22 @@ export function setRequestExtPrebidAliases(ortbRequest, bidderRequest, context,
// adding alias only if alias source bidder exists and alias isn't configured to be standalone
// pbs adapter
if (!bidder || !bidder.getSpec().skipPbsAliasing) {
// set alias
deepSetValue(
ortbRequest,
`ext.prebid.aliases.${bidderRequest.bidderCode}`,
am.aliasRegistry[bidderRequest.bidderCode]
);

// set alias gvlids if present also
const gvlId = config.getConfig(`gvlMapping.${bidderRequest.bidderCode}`) || bidder?.getSpec?.().gvlid;
if (gvlId) {
deepSetValue(
ortbRequest,
`ext.prebid.aliasgvlids.${bidderRequest.bidderCode}`,
gvlId
);
}
}
}
}
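
The gvlMapping lookup added above is driven by publisher config. A hedged example of supplying a GVL ID for an alias (alias name and ID are hypothetical); when no mapping is present, the adapter falls back to the alias spec's own gvlid:

pbjs.setConfig({
  gvlMapping: {
    myBidderAlias: 123 // forwarded to PBS as ext.prebid.aliasgvlids.myBidderAlias
  }
});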
3 changes: 2 additions & 1 deletion modules/.submodules.json
@@ -51,7 +51,8 @@
"euidIdSystem",
"unifiedIdSystem",
"verizonMediaIdSystem",
"zeotapIdPlusIdSystem"
"zeotapIdPlusIdSystem",
"yandexIdSystem"
],
"adpod": [
"freeWheelAdserverVideo",
2 changes: 1 addition & 1 deletion modules/33acrossBidAdapter.js
@@ -15,7 +15,7 @@ import {
import {BANNER, VIDEO} from '../src/mediaTypes.js';
import {isSlotMatchingAdUnitCode} from '../libraries/gptUtils/gptUtils.js';

// **************************** UTILS *************************** //
// **************************** UTILS ************************** //
const BIDDER_CODE = '33across';
const BIDDER_ALIASES = ['33across_mgni'];
const END_POINT = 'https://ssc.33across.com/api/v1/hb';
20 changes: 18 additions & 2 deletions modules/adhashBidAdapter.js
@@ -7,6 +7,7 @@ const VERSION = '3.6';
const BAD_WORD_STEP = 0.1;
const BAD_WORD_MIN = 0.2;
const ADHASH_BIDDER_CODE = 'adhash';
const storage = getStorageManager({ bidderCode: ADHASH_BIDDER_CODE });

/**
* Function that checks the page where the ads are being served for brand safety.
@@ -120,7 +121,7 @@ function brandSafety(badWords, maxScore) {
.replaceAll(/\s\s+/g, ' ')
.toLowerCase()
.trim();
const content = window.top.document.body.innerText.toLowerCase();
const content = window.top.document.body.textContent.toLowerCase();
// \p{L} matches a single unicode code point in the category 'letter'. Matches any kind of letter from any language.
const regexp = new RegExp('[\\p{L}]+', 'gu');
const wordsMatched = content.match(regexp);
@@ -171,7 +172,6 @@ export const spec = {
},

buildRequests: (validBidRequests, bidderRequest) => {
const storage = getStorageManager({ bidderCode: ADHASH_BIDDER_CODE });
const { gdprConsent } = bidderRequest;
const bidRequests = [];
const body = document.body;
@@ -199,9 +199,11 @@
position: validBidRequests[i].adUnitCode
};
let recentAds = [];
let recentAdsPrebid = [];
if (storage.localStorageIsEnabled()) {
const prefix = validBidRequests[i].params.prefix || 'adHash';
recentAds = JSON.parse(storage.getDataFromLocalStorage(prefix + 'recentAds') || '[]');
recentAdsPrebid = JSON.parse(storage.getDataFromLocalStorage(prefix + 'recentAdsPrebid') || '[]');
}

// Needed for the ad density calculation
@@ -237,6 +239,7 @@
blockedCreatives: [],
currentTimestamp: (new Date().getTime() / 1000) | 0,
recentAds: recentAds,
recentAdsPrebid: recentAdsPrebid,
GDPRApplies: gdprConsent ? gdprConsent.gdprApplies : null,
GDPR: gdprConsent ? gdprConsent.consentString : null,
servedAdsCount: window.adsCount,
@@ -263,6 +266,19 @@
return [];
}

if (storage.localStorageIsEnabled()) {
const prefix = request.bidRequest.params.prefix || 'adHash';
let recentAdsPrebid = JSON.parse(storage.getDataFromLocalStorage(prefix + 'recentAdsPrebid') || '[]');
recentAdsPrebid.push([
(new Date().getTime() / 1000) | 0,
responseBody.creatives[0].advertiserId,
responseBody.creatives[0].budgetId,
responseBody.creatives[0].expectedHashes.length ? responseBody.creatives[0].expectedHashes[0] : '',
]);
let recentAdsPrebidFinal = JSON.stringify(recentAdsPrebid.slice(-100));
storage.setDataInLocalStorage(prefix + 'recentAdsPrebid', recentAdsPrebidFinal);
}

const publisherURL = JSON.stringify(request.bidRequest.params.platformURL);
const bidderURL = request.bidRequest.params.bidderURL || 'https://bidder.adhash.com';
const oneTimeId = request.bidRequest.adUnitCode + Math.random().toFixed(16).replace('0.', '.');
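To make the new bookkeeping concrete, a sketch of the value the adapter now persists under prefix + 'recentAdsPrebid' (default prefix 'adHash'; all values are placeholders):

// Each served Prebid ad is appended as [unix timestamp, advertiserId, budgetId, first expectedHash or ''].
const recentAdsPrebid = [
  [1718618400, 'adv-123', 'budget-456', 'a1b2c3']
];
// Only the most recent 100 entries are written back to local storage.
const persisted = JSON.stringify(recentAdsPrebid.slice(-100));
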
22 changes: 11 additions & 11 deletions modules/adnuntiusBidAdapter.js
@@ -1,6 +1,6 @@
import { registerBidder } from '../src/adapters/bidderFactory.js';
import { BANNER, VIDEO } from '../src/mediaTypes.js';
import { isStr, deepAccess } from '../src/utils.js';
import {isStr, isEmpty, deepAccess, getUnixTimestampFromNow, convertObjectToArray} from '../src/utils.js';
import { config } from '../src/config.js';
import { getStorageManager } from '../src/storageManager.js';

@@ -19,10 +19,6 @@ const METADATA_KEY = 'adn.metaData';
const METADATA_KEY_SEPARATOR = '@@@';

export const misc = {
getUnixTimestamp: function (addDays, asMinutes) {
const multiplication = addDays / (asMinutes ? 1440 : 1);
return Date.now() + (addDays && addDays > 0 ? (1000 * 60 * 60 * 24 * multiplication) : 0);
}
};

const storageTool = (function () {
@@ -50,11 +46,11 @@ const storageTool = (function () {
if (datum.key === 'voidAuIds' && Array.isArray(datum.value)) {
return true;
}
return datum.key && datum.value && datum.exp && datum.exp > misc.getUnixTimestamp() && (!network || network === datum.network);
return datum.key && datum.value && datum.exp && datum.exp > getUnixTimestampFromNow() && (!network || network === datum.network);
}) : [];
const voidAuIdsEntry = filteredEntries.find(entry => entry.key === 'voidAuIds');
if (voidAuIdsEntry) {
const now = misc.getUnixTimestamp();
const now = getUnixTimestampFromNow();
voidAuIdsEntry.value = voidAuIdsEntry.value.filter(voidAuId => voidAuId.auId && voidAuId.exp > now);
if (!voidAuIdsEntry.value.length) {
filteredEntries = filteredEntries.filter(entry => entry.key !== 'voidAuIds');
@@ -73,7 +69,7 @@ const storageTool = (function () {
const notNewExistingAuIds = currentVoidAuIds.filter(auIdObj => {
return newAuIds.indexOf(auIdObj.value) < -1;
}) || [];
const oneDayFromNow = misc.getUnixTimestamp(1);
const oneDayFromNow = getUnixTimestampFromNow(1);
const apiIdsArray = newAuIds.map(auId => {
return { exp: oneDayFromNow, auId: auId };
}) || [];
@@ -86,7 +82,7 @@
if (key !== 'voidAuIds') {
metaAsObj[key + METADATA_KEY_SEPARATOR + network] = {
value: apiRespMetadata[key],
exp: misc.getUnixTimestamp(100),
exp: getUnixTimestampFromNow(100),
network: network
}
}
@@ -201,10 +197,14 @@ const targetingTool = (function() {
},
mergeKvsFromOrtb: function(bidTargeting, bidderRequest) {
const kv = getKvsFromOrtb(bidderRequest || {});
if (!kv) {
if (isEmpty(kv)) {
return;
}
bidTargeting.kv = {...kv, ...bidTargeting.kv};
if (bidTargeting.kv && !Array.isArray(bidTargeting.kv)) {
bidTargeting.kv = convertObjectToArray(bidTargeting.kv);
}
bidTargeting.kv = bidTargeting.kv || [];
bidTargeting.kv = bidTargeting.kv.concat(convertObjectToArray(kv));
}
}
})();
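Judging from the helper removed above, the shared util is presumably expected to keep the same semantics: the current time plus an optional number of days (or minutes). An assumed equivalence, not verified against src/utils.js:

// getUnixTimestampFromNow()    ~= Date.now()
// getUnixTimestampFromNow(1)   ~= Date.now() + 24 * 60 * 60 * 1000       // one day ahead, in ms
// getUnixTimestampFromNow(100) ~= Date.now() + 100 * 24 * 60 * 60 * 1000 // metadata expiry used above
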
2 changes: 1 addition & 1 deletion modules/adotBidAdapter.js
@@ -197,7 +197,7 @@ function buildVideo(video) {
mimes: video.mimes,
minduration: video.minduration,
maxduration: video.maxduration,
placement: video.placement,
placement: video.plcmt,
playbackmethod: video.playbackmethod,
pos: video.position || 0,
protocols: video.protocols,
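The one-line change above reads the newer plcmt signal instead of the deprecated ORTB placement field. A hedged sketch of an ad unit that would populate it (values are illustrative; bidder params omitted for brevity):

const adUnit = {
  code: 'video-slot',
  mediaTypes: {
    video: {
      context: 'instream',
      playerSize: [[640, 480]],
      mimes: ['video/mp4'],
      plcmt: 1 // 1 = instream in the OpenRTB 2.6 plcmt taxonomy
    }
  },
  bids: [{ bidder: 'adot', params: {} }]
};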