gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2023-11-21 00:14:21 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2023-11-21 00:14:21 +0300
commit    235f755398a6a199b22e4924e7a81670b0dfdaef
tree      84a23b2343ef5a4c0bd03d5ab03ba5370f2b5503
parent    e12ad88e786d7a91d94d92b26bce9e984d9692f5
Add latest changes from gitlab-org/gitlab@master
 GITALY_SERVER_VERSION | 2
 Gemfile | 6
 Gemfile.checksum | 17
 Gemfile.lock | 29
 app/assets/javascripts/behaviors/markdown/copy_as_gfm.js | 4
 app/assets/javascripts/groups/components/group_item.vue | 4
 app/assets/javascripts/groups/components/item_stats.vue | 2
 app/assets/javascripts/repository/components/blob_content_viewer.vue | 2
 app/assets/javascripts/repository/components/blob_viewers/index.js | 8
 app/assets/javascripts/repository/mixins/highlight_mixin.js | 23
 app/assets/javascripts/vue_shared/components/source_viewer/constants.js | 1
 app/assets/javascripts/vue_shared/components/source_viewer/source_viewer_new.vue | 19
 app/assets/javascripts/vue_shared/components/source_viewer/workers/highlight_utils.js | 36
 app/assets/javascripts/vue_shared/components/source_viewer/workers/highlight_worker.js | 4
 app/assets/javascripts/vue_shared/components/user_access_role_badge.vue | 9
 app/assets/javascripts/work_items/components/shared/work_item_link_child_contents.vue | 13
 app/assets/javascripts/work_items/components/work_item_links/work_item_link_child.vue | 6
 app/assets/javascripts/work_items/components/work_item_links/work_item_tree.vue | 56
 app/services/metrics_service.rb | 6
 config/feature_flags/development/access_rest_chat.yml | 8
 config/feature_flags/development/prom_metrics_rust.yml | 8
 config/initializers/7_redis.rb | 3
 config/initializers/sidekiq.rb | 14
 config/initializers/sidekiq_cluster.rb | 2
 config/webpack.config.js | 3
 db/migrate/20231120071202_remove_not_null_constraint_from_member_role_column.rb | 14
 db/schema_migrations/20231120071202 | 1
 doc/administration/logs/index.md | 11
 doc/administration/nfs.md | 1
 doc/administration/postgresql/replication_and_failover.md | 12
 doc/subscriptions/customers_portal.md | 18
 doc/user/project/import/bitbucket.md | 26
 lib/gitlab/instrumentation/redis_client_middleware.rb | 46
 lib/gitlab/instrumentation/redis_helper.rb | 2
 lib/gitlab/memory/watchdog/handlers/sidekiq_handler.rb | 4
 lib/gitlab/patch/sidekiq_cron_poller.rb | 4
 lib/gitlab/redis/wrapper.rb | 23
 lib/gitlab/runtime.rb | 2
 lib/gitlab/sidekiq_config.rb | 3
 lib/gitlab/sidekiq_logging/structured_logger.rb | 8
 lib/gitlab/sidekiq_middleware/server_metrics.rb | 2
 lib/gitlab/sidekiq_migrate_jobs.rb | 19
 package.json | 2
 spec/frontend/repository/mixins/highlight_mixin_spec.js | 50
 spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js | 35
 spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js | 30
 spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb | 224
 spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb | 6
 spec/lib/gitlab/runtime_spec.rb | 2
 spec/lib/gitlab/sidekiq_config_spec.rb | 3
 spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb | 2
 spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb | 4
 spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb | 7
 spec/lib/gitlab/sidekiq_status_spec.rb | 2
 spec/support/helpers/dns_helpers.rb | 13
 spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb | 4
 spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb | 1
 spec/support/shared_examples/redis/redis_shared_examples.rb | 61
 spec/support/sidekiq.rb | 10
 spec/support/sidekiq_middleware.rb | 9
 vendor/gems/sidekiq-reliable-fetch/Gemfile | 1
 vendor/gems/sidekiq-reliable-fetch/Gemfile.lock | 68
 vendor/gems/sidekiq-reliable-fetch/README.md | 2
 vendor/gems/sidekiq-reliable-fetch/gitlab-sidekiq-fetcher.gemspec | 4
 vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/base_reliable_fetch.rb | 26
 vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/interrupted_set.rb | 2
 vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/semi_reliable_fetch.rb | 15
 vendor/gems/sidekiq-reliable-fetch/spec/base_reliable_fetch_spec.rb | 38
 vendor/gems/sidekiq-reliable-fetch/spec/fetch_shared_examples.rb | 73
 vendor/gems/sidekiq-reliable-fetch/spec/reliable_fetch_spec.rb | 1
 vendor/gems/sidekiq-reliable-fetch/spec/semi_reliable_fetch_spec.rb | 19
 vendor/gems/sidekiq-reliable-fetch/spec/spec_helper.rb | 2
 vendor/gems/sidekiq-reliable-fetch/tests/interruption/config.rb | 1
 vendor/gems/sidekiq-reliable-fetch/tests/reliability/config.rb | 1
 vite.config.js | 9
 yarn.lock | 22
 76 files changed, 481 insertions(+), 749 deletions(-)
diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION
index 248b2d89c7a..5b711ab30f5 100644
--- a/GITALY_SERVER_VERSION
+++ b/GITALY_SERVER_VERSION
@@ -1 +1 @@
-5b092369fc03602f5d0d99ab8f475875ab9a41a6
+276bfb610a543a5de88a1d5f69219811dad13a6b
diff --git a/Gemfile b/Gemfile
index 30d8bc7622a..3d0759cc6d5 100644
--- a/Gemfile
+++ b/Gemfile
@@ -247,8 +247,8 @@ gem 'state_machines-activerecord', '~> 0.8.0' # rubocop:todo Gemfile/MissingFeat
gem 'acts-as-taggable-on', '~> 10.0' # rubocop:todo Gemfile/MissingFeatureCategory
# Background jobs
-gem 'sidekiq', '~> 7.1.6' # rubocop:todo Gemfile/MissingFeatureCategory
-gem 'sidekiq-cron', '~> 1.9.0' # rubocop:todo Gemfile/MissingFeatureCategory
+gem 'sidekiq', '~> 6.5.10' # rubocop:todo Gemfile/MissingFeatureCategory
+gem 'sidekiq-cron', '~> 1.8.0' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'gitlab-sidekiq-fetcher', path: 'vendor/gems/sidekiq-reliable-fetch', require: 'sidekiq-reliable-fetch' # rubocop:todo Gemfile/MissingFeatureCategory
# Cron Parser
@@ -383,7 +383,7 @@ gem 'snowplow-tracker', '~> 0.8.0' # rubocop:todo Gemfile/MissingFeatureCategory
# Metrics
gem 'webrick', '~> 1.8.1', require: false # rubocop:todo Gemfile/MissingFeatureCategory
-gem 'prometheus-client-mmap', '~> 0.28', '>= 0.28.1', require: 'prometheus/client' # rubocop:todo Gemfile/MissingFeatureCategory
+gem 'prometheus-client-mmap', '~> 1.0', require: 'prometheus/client' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'warning', '~> 1.3.0' # rubocop:todo Gemfile/MissingFeatureCategory
diff --git a/Gemfile.checksum b/Gemfile.checksum
index bd22ef4e64c..e94f6bdf91c 100644
--- a/Gemfile.checksum
+++ b/Gemfile.checksum
@@ -458,11 +458,11 @@
{"name":"prime","version":"0.1.2","platform":"ruby","checksum":"d4e956cadfaf04de036dc7dc74f95bf6a285a62cc509b28b7a66b245d19fe3a4"},
{"name":"prism","version":"0.17.1","platform":"ruby","checksum":"e63f86df2c36aecd578431ee0c9d1f66cdef98a406f0a11e7da949514212cbcd"},
{"name":"proc_to_ast","version":"0.1.0","platform":"ruby","checksum":"92a73fa66e2250a83f8589f818b0751bcf227c68f85916202df7af85082f8691"},
-{"name":"prometheus-client-mmap","version":"0.28.1","platform":"aarch64-linux","checksum":"b190045625ee8f8b3ef90e583ef7fadeac745810c8a243f1ed5e9b47c18146f0"},
-{"name":"prometheus-client-mmap","version":"0.28.1","platform":"arm64-darwin","checksum":"9e7022848493b882d1de9f42d7784f9821e83b2c3b4b2dc9a12c2c8269209a6e"},
-{"name":"prometheus-client-mmap","version":"0.28.1","platform":"ruby","checksum":"92fb3989a16927fb0cacfcb3ebc6c8ea5e4abf82e4aef22ab62c3c4b8f17e52a"},
-{"name":"prometheus-client-mmap","version":"0.28.1","platform":"x86_64-darwin","checksum":"66e7cad96ad581174edf4f1f52da141e5a15389ce3283fba7b4e3e5968dd46b7"},
-{"name":"prometheus-client-mmap","version":"0.28.1","platform":"x86_64-linux","checksum":"4d3e92a249b16e41ef3e55078537bca599659578c0f86e31d195429c6e5e1f3a"},
+{"name":"prometheus-client-mmap","version":"1.0.0","platform":"aarch64-linux","checksum":"6a4bb32e7f7c554bf9d7d1c6c1a40ad3cd94d8bcb8265f6da4fe7601761d9347"},
+{"name":"prometheus-client-mmap","version":"1.0.0","platform":"arm64-darwin","checksum":"e92ac0806393640dd91d6048d9ab8cfec0d7b6f40555ea80c930414968c38b94"},
+{"name":"prometheus-client-mmap","version":"1.0.0","platform":"ruby","checksum":"b40e2ab74d51593ab5b4290a8dbe740347c595709ae56d3e91cacaf53e46a716"},
+{"name":"prometheus-client-mmap","version":"1.0.0","platform":"x86_64-darwin","checksum":"ba70cc88e6caed5bc8ae66aa7323a032d1d1fd51d7ebd94a017426a9d7e534e1"},
+{"name":"prometheus-client-mmap","version":"1.0.0","platform":"x86_64-linux","checksum":"c664b2e047e032b7d706f22bbc57c213f8e551f69c7eac738c96e0a4b2fb9796"},
{"name":"protocol","version":"2.0.0","platform":"ruby","checksum":"dcd7c509e53b8cd6284e965a2e2e71d5291ca9e2d50acfa3d7ee0561c0df16b9"},
{"name":"pry","version":"0.14.2","platform":"java","checksum":"fd780670977ba04ff7ee32dabd4d02fe4bf02e977afe8809832d5dca1412862e"},
{"name":"pry","version":"0.14.2","platform":"ruby","checksum":"c4fe54efedaca1d351280b45b8849af363184696fcac1c72e0415f9bdac4334d"},
@@ -495,7 +495,7 @@
{"name":"rake","version":"13.0.6","platform":"ruby","checksum":"5ce4bf5037b4196c24ac62834d8db1ce175470391026bd9e557d669beeb19097"},
{"name":"rb-fsevent","version":"0.11.2","platform":"ruby","checksum":"43900b972e7301d6570f64b850a5aa67833ee7d87b458ee92805d56b7318aefe"},
{"name":"rb-inotify","version":"0.10.1","platform":"ruby","checksum":"050062d4f31d307cca52c3f6a7f4b946df8de25fc4bd373e1a5142e41034a7ca"},
-{"name":"rb_sys","version":"0.9.78","platform":"ruby","checksum":"635275db76f8885443dbd4ce8dd9b011ea27cb35dfa67ad1f268aba4f4fe7a26"},
+{"name":"rb_sys","version":"0.9.83","platform":"ruby","checksum":"0ed80df79aa08b942af731d93a676f2d885428267f2fbf138f9b6b7809c6455e"},
{"name":"rbtrace","version":"0.4.14","platform":"ruby","checksum":"162bbf89cecabfc4f09c869b655f6f3a679c4870ebb7cbdcadf7393a81cc1769"},
{"name":"rbtree","version":"0.4.6","platform":"ruby","checksum":"14eea4469b24fd2472542e5f3eb105d6344c8ccf36f0b56d55fdcfeb4e0f10fc"},
{"name":"rchardet","version":"1.8.0","platform":"ruby","checksum":"693acd5253d5ade81a51940697955f6dd4bb2f0d245bda76a8e23deec70a52c7"},
@@ -514,7 +514,6 @@
{"name":"redcarpet","version":"3.6.0","platform":"ruby","checksum":"8ad1889c0355ff4c47174af14edd06d62f45a326da1da6e8a121d59bdcd2e9e9"},
{"name":"redis","version":"4.8.0","platform":"ruby","checksum":"2000cf5014669c9dc821704b6d322a35a9a33852a95208911d9175d63b448a44"},
{"name":"redis-actionpack","version":"5.3.0","platform":"ruby","checksum":"3fb1ad0a8fd9d26a289c9399bb609dcaef38bf37711e6f677a53ca728fc19140"},
-{"name":"redis-client","version":"0.18.0","platform":"ruby","checksum":"a93bd1f99c024bb7f8e21eff7bdbcb16d85dbcbfe3f6ed051239e38d4c127704"},
{"name":"redis-rack","version":"2.1.4","platform":"ruby","checksum":"0872eecb303e483c3863d6bd0d47323d230640d41c1a4ac4a2c7596ec0b1774c"},
{"name":"redis-store","version":"1.9.1","platform":"ruby","checksum":"7b4c7438d46f7b7ce8f67fc0eda3a04fc67d32d28cf606cc98a5df4d2b77071d"},
{"name":"regexp_parser","version":"2.6.0","platform":"ruby","checksum":"f163ba463a45ca2f2730e0902f2475bb0eefcd536dfc2f900a86d1e5a7d7a556"},
@@ -590,8 +589,8 @@
{"name":"sexp_processor","version":"4.17.0","platform":"ruby","checksum":"4daa4874ce1838cd801c65e66ed5d4f140024404a3de7482c36d4ef2604dff6f"},
{"name":"shellany","version":"0.0.1","platform":"ruby","checksum":"0e127a9132698766d7e752e82cdac8250b6adbd09e6c0a7fbbb6f61964fedee7"},
{"name":"shoulda-matchers","version":"5.1.0","platform":"ruby","checksum":"a01d20589989e9653ab4a28c67d9db2b82bcf0a2496cf01d5e1a95a4aaaf5b07"},
-{"name":"sidekiq","version":"7.1.6","platform":"ruby","checksum":"7859da66d5bcef3c22bea2c3091d08c866890168e003f5bf4dea197dc37843a2"},
-{"name":"sidekiq-cron","version":"1.9.1","platform":"ruby","checksum":"79d11c79c686ec2e540c1932ccd12b0c07e7c228d28a0a7c515a6c7fcd3c22df"},
+{"name":"sidekiq","version":"6.5.12","platform":"ruby","checksum":"b4f93b2204c42220d0b526a7b8e0c49b5f9da82c1ce1a05d2baf1e8f744c197f"},
+{"name":"sidekiq-cron","version":"1.8.0","platform":"ruby","checksum":"47da72ca73ce5b71896aaf7e7c4391386ec517dd003f184c50c0b727d82eb0ca"},
{"name":"sigdump","version":"0.2.4","platform":"ruby","checksum":"0bf2176e55c1a262788623fe5ea57caddd6ba2abebe5e349d9d5e7c3a3010ed7"},
{"name":"signet","version":"0.17.0","platform":"ruby","checksum":"1d2831930dc28da32e34bec68cf7ded97ee2867b208f97c500ee293829cb0004"},
{"name":"simple_po_parser","version":"1.1.6","platform":"ruby","checksum":"122687d44d3de516a0e69e2f383a4180f5015e8c5ed5a7f2258f2b376f64cbf3"},
diff --git a/Gemfile.lock b/Gemfile.lock
index 809d2a19eac..78421e05b9c 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -169,9 +169,9 @@ PATH
PATH
remote: vendor/gems/sidekiq-reliable-fetch
specs:
- gitlab-sidekiq-fetcher (0.11.0)
+ gitlab-sidekiq-fetcher (0.10.0)
json (>= 2.5)
- sidekiq (~> 7.0)
+ sidekiq (~> 6.1)
GEM
remote: https://rubygems.org/
@@ -1249,7 +1249,7 @@ GEM
coderay
parser
unparser
- prometheus-client-mmap (0.28.1)
+ prometheus-client-mmap (1.0.0)
rb_sys (~> 0.9)
protocol (2.0.0)
ruby_parser (~> 3.0)
@@ -1329,7 +1329,7 @@ GEM
rb-fsevent (0.11.2)
rb-inotify (0.10.1)
ffi (~> 1.0)
- rb_sys (0.9.78)
+ rb_sys (0.9.83)
rbtrace (0.4.14)
ffi (>= 1.0.6)
msgpack (>= 0.4.3)
@@ -1347,8 +1347,6 @@ GEM
actionpack (>= 5, < 8)
redis-rack (>= 2.1.0, < 3)
redis-store (>= 1.1.0, < 2)
- redis-client (0.18.0)
- connection_pool
redis-rack (2.1.4)
rack (>= 2.0.8, < 3)
redis-store (>= 1.2, < 2)
@@ -1531,13 +1529,12 @@ GEM
shellany (0.0.1)
shoulda-matchers (5.1.0)
activesupport (>= 5.2.0)
- sidekiq (7.1.6)
- concurrent-ruby (< 2)
- connection_pool (>= 2.3.0)
- rack (>= 2.2.4)
- redis-client (>= 0.14.0)
- sidekiq-cron (1.9.1)
- fugit (~> 1.8)
+ sidekiq (6.5.12)
+ connection_pool (>= 2.2.5, < 3)
+ rack (~> 2.0)
+ redis (>= 4.5.0, < 5)
+ sidekiq-cron (1.8.0)
+ fugit (~> 1)
sidekiq (>= 4.2.1)
sigdump (0.2.4)
signet (0.17.0)
@@ -2001,7 +1998,7 @@ DEPENDENCIES
pg_query (~> 4.2.3)
png_quantizator (~> 0.2.1)
premailer-rails (~> 1.10.3)
- prometheus-client-mmap (~> 0.28, >= 0.28.1)
+ prometheus-client-mmap (~> 1.0)
pry-byebug
pry-rails (~> 0.3.9)
pry-shell (~> 0.6.4)
@@ -2056,8 +2053,8 @@ DEPENDENCIES
sentry-ruby (~> 5.8.0)
sentry-sidekiq (~> 5.8.0)
shoulda-matchers (~> 5.1.0)
- sidekiq (~> 7.1.6)
- sidekiq-cron (~> 1.9.0)
+ sidekiq (~> 6.5.10)
+ sidekiq-cron (~> 1.8.0)
sigdump (~> 0.2.4)
simple_po_parser (~> 1.1.6)
simplecov (~> 0.21)
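
Collected in one place, the dependency pins this change lands on look roughly like the following Gemfile excerpt (a sketch: the constraints are copied from the hunks above, everything else is omitted):

```ruby
# Illustrative Gemfile excerpt only; the real Gemfile carries many more gems
# and rubocop annotations.
source 'https://rubygems.org'

# Background jobs: back on the Sidekiq 6.5 series.
gem 'sidekiq', '~> 6.5.10'
gem 'sidekiq-cron', '~> 1.8.0'

# Metrics: the mmap-backed Prometheus client moves to its 1.0 release.
gem 'prometheus-client-mmap', '~> 1.0', require: 'prometheus/client'
```
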
diff --git a/app/assets/javascripts/behaviors/markdown/copy_as_gfm.js b/app/assets/javascripts/behaviors/markdown/copy_as_gfm.js
index 36317444af9..72aae254584 100644
--- a/app/assets/javascripts/behaviors/markdown/copy_as_gfm.js
+++ b/app/assets/javascripts/behaviors/markdown/copy_as_gfm.js
@@ -152,7 +152,9 @@ export class CopyAsGFM {
if (lineElements.length > 0) {
for (let i = 0; i < lineElements.length; i += 1) {
const lineElement = lineElements[i];
- codeElement.appendChild(lineElement);
+ const line = document.createElement('span');
+ line.append(...lineElement.childNodes);
+ codeElement.appendChild(line);
codeElement.appendChild(document.createTextNode('\n'));
}
} else {
diff --git a/app/assets/javascripts/groups/components/group_item.vue b/app/assets/javascripts/groups/components/group_item.vue
index af1af86d0c4..3a08e3e546f 100644
--- a/app/assets/javascripts/groups/components/group_item.vue
+++ b/app/assets/javascripts/groups/components/group_item.vue
@@ -243,7 +243,7 @@ export default {
</div>
</gl-popover>
</template>
- <user-access-role-badge v-if="group.permission" class="gl-mr-3">
+ <user-access-role-badge v-if="group.permission" size="sm" class="gl-mr-3">
{{ group.permission }}
</user-access-role-badge>
<gl-label
@@ -254,7 +254,7 @@ export default {
size="sm"
/>
</div>
- <div v-if="group.description" class="description">
+ <div v-if="group.description" class="description gl-font-sm gl-mt-1">
<span
v-safe-html:[$options.safeHtmlConfig]="group.description"
:itemprop="microdata.descriptionItemprop"
diff --git a/app/assets/javascripts/groups/components/item_stats.vue b/app/assets/javascripts/groups/components/item_stats.vue
index d87190edfd2..55c5ef2ae80 100644
--- a/app/assets/javascripts/groups/components/item_stats.vue
+++ b/app/assets/javascripts/groups/components/item_stats.vue
@@ -68,7 +68,7 @@ export default {
css-class="project-stars"
icon-name="star"
/>
- <div v-if="isProject" class="last-updated">
+ <div v-if="isProject" class="last-updated gl-font-sm">
<time-ago-tooltip :time="item.lastActivityAt" tooltip-placement="bottom" />
</div>
</div>
diff --git a/app/assets/javascripts/repository/components/blob_content_viewer.vue b/app/assets/javascripts/repository/components/blob_content_viewer.vue
index 97a1cbda5d0..264dbff525b 100644
--- a/app/assets/javascripts/repository/components/blob_content_viewer.vue
+++ b/app/assets/javascripts/repository/components/blob_content_viewer.vue
@@ -79,7 +79,7 @@ export default {
const urlHash = getLocationHash(); // If there is a code line hash in the URL we render with the simple viewer
const useSimpleViewer = usePlain || urlHash?.startsWith('L') || !this.hasRichViewer;
- this.initHighlightWorker(this.blobInfo);
+ this.initHighlightWorker(this.blobInfo, this.isUsingLfs);
this.switchViewer(useSimpleViewer ? SIMPLE_BLOB_VIEWER : RICH_BLOB_VIEWER); // By default, if present, use the rich viewer to render
},
error() {
diff --git a/app/assets/javascripts/repository/components/blob_viewers/index.js b/app/assets/javascripts/repository/components/blob_viewers/index.js
index d434700b29f..016f7f9fe43 100644
--- a/app/assets/javascripts/repository/components/blob_viewers/index.js
+++ b/app/assets/javascripts/repository/components/blob_viewers/index.js
@@ -1,4 +1,4 @@
-import { TEXT_FILE_TYPE, JSON_LANGUAGE } from '../../constants';
+import { TEXT_FILE_TYPE } from '../../constants';
export const viewers = {
csv: () => import('./csv_viewer.vue'),
@@ -17,12 +17,10 @@ export const viewers = {
geo_json: () => import('./geo_json/geo_json_viewer.vue'),
};
-export const loadViewer = (type, isUsingLfs, hljsWorkerEnabled, language) => {
+export const loadViewer = (type, isUsingLfs, hljsWorkerEnabled) => {
let viewer = viewers[type];
- if (hljsWorkerEnabled && language === JSON_LANGUAGE && type === TEXT_FILE_TYPE) {
- // The New Source Viewer currently only supports JSON files.
- // More language support will be added in: https://gitlab.com/gitlab-org/gitlab/-/issues/415753
+ if (hljsWorkerEnabled && type === TEXT_FILE_TYPE) {
viewer = () => import('~/vue_shared/components/source_viewer/source_viewer_new.vue');
}
diff --git a/app/assets/javascripts/repository/mixins/highlight_mixin.js b/app/assets/javascripts/repository/mixins/highlight_mixin.js
index 5b6f68681bb..fa4f0f48512 100644
--- a/app/assets/javascripts/repository/mixins/highlight_mixin.js
+++ b/app/assets/javascripts/repository/mixins/highlight_mixin.js
@@ -8,6 +8,8 @@ import { splitIntoChunks } from '~/vue_shared/components/source_viewer/workers/h
import LineHighlighter from '~/blob/line_highlighter';
import languageLoader from '~/content_editor/services/highlight_js_language_loader';
import Tracking from '~/tracking';
+import axios from '~/lib/utils/axios_utils';
+import { TEXT_FILE_TYPE } from '../constants';
/*
* This mixin is intended to be used as an interface between our highlight worker and Vue components
@@ -36,14 +38,29 @@ export default {
this.trackEvent(EVENT_LABEL_FALLBACK, language);
this?.onError();
},
- initHighlightWorker({ rawTextBlob, language, fileType }) {
- if (language !== 'json' || !this.glFeatures.highlightJsWorker) return;
+ async handleLFSBlob(externalStorageUrl, rawPath, language) {
+ await axios
+ .get(externalStorageUrl || rawPath)
+ .then(({ data }) => this.instructWorker(data, language))
+ .catch(() => this.$emit('error'));
+ },
+ initHighlightWorker(blob, isUsingLfs) {
+ const { rawTextBlob, language, fileType, externalStorageUrl, rawPath, simpleViewer } = blob;
+
+ if (!this.glFeatures.highlightJsWorker || simpleViewer?.fileType !== TEXT_FILE_TYPE) return;
if (this.isUnsupportedLanguage(language)) {
this.handleUnsupportedLanguage(language);
return;
}
+ this.highlightWorker.onmessage = this.handleWorkerMessage;
+
+ if (isUsingLfs) {
+ this.handleLFSBlob(externalStorageUrl, rawPath, language);
+ return;
+ }
+
/*
* We want to start rendering content as soon as possible, but highlighting large amounts of
* content can take long, so we render the content in phases:
@@ -64,8 +81,6 @@ export default {
this.chunks = splitIntoChunks(language, firstSeventyLines);
- this.highlightWorker.onmessage = this.handleWorkerMessage;
-
// Instruct the worker to highlight the first 70 lines ASAP, this improves perceived performance.
this.instructWorker(firstSeventyLines, language);
diff --git a/app/assets/javascripts/vue_shared/components/source_viewer/constants.js b/app/assets/javascripts/vue_shared/components/source_viewer/constants.js
index 582093e5739..47b802d9d17 100644
--- a/app/assets/javascripts/vue_shared/components/source_viewer/constants.js
+++ b/app/assets/javascripts/vue_shared/components/source_viewer/constants.js
@@ -14,6 +14,7 @@ export const ROUGE_TO_HLJS_LANGUAGE_MAP = {
clean: 'clean',
clojure: 'clojure',
cmake: 'cmake',
+ codeowners: 'codeowners',
coffeescript: 'coffeescript',
coq: 'coq',
cpp: 'cpp',
diff --git a/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer_new.vue b/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer_new.vue
index dcefa66c403..7ced12952dd 100644
--- a/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer_new.vue
+++ b/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer_new.vue
@@ -5,7 +5,7 @@ import SafeHtml from '~/vue_shared/directives/safe_html';
import Tracking from '~/tracking';
import addBlobLinksTracking from '~/blob/blob_links_tracking';
import LineHighlighter from '~/blob/line_highlighter';
-import { EVENT_ACTION, EVENT_LABEL_VIEWER } from './constants';
+import { EVENT_ACTION, EVENT_LABEL_VIEWER, CODEOWNERS_FILE_NAME } from './constants';
import Chunk from './components/chunk_new.vue';
import Blame from './components/blame_info.vue';
import { calculateBlameOffset, shouldRender, toggleBlameClasses } from './utils';
@@ -21,6 +21,7 @@ export default {
components: {
Chunk,
Blame,
+ CodeownersValidation: () => import('ee_component/blob/components/codeowners_validation.vue'),
},
directives: {
SafeHtml,
@@ -45,6 +46,10 @@ export default {
type: String,
required: true,
},
+ currentRef: {
+ type: String,
+ required: true,
+ },
},
data() {
return {
@@ -66,6 +71,9 @@ export default {
return result;
}, []);
},
+ isCodeownersFile() {
+ return this.blob.name === CODEOWNERS_FILE_NAME;
+ },
},
watch: {
showBlame: {
@@ -136,11 +144,18 @@ export default {
<blame v-if="showBlame && blameInfo.length" :blame-info="blameInfo" />
<div
- class="file-content code js-syntax-highlight blob-content gl-display-flex gl-flex-direction-column gl-overflow-auto gl-w-full"
+ class="file-content code js-syntax-highlight blob-content gl-display-flex gl-flex-direction-column gl-overflow-auto gl-w-full blob-viewer"
:class="$options.userColorScheme"
data-type="simple"
:data-path="blob.path"
>
+ <codeowners-validation
+ v-if="isCodeownersFile"
+ class="gl-text-black-normal"
+ :current-ref="currentRef"
+ :project-path="projectPath"
+ :file-path="blob.path"
+ />
<chunk
v-for="(chunk, index) in chunks"
:key="index"
diff --git a/app/assets/javascripts/vue_shared/components/source_viewer/workers/highlight_utils.js b/app/assets/javascripts/vue_shared/components/source_viewer/workers/highlight_utils.js
index 8d8e945cd5f..057a1c2d113 100644
--- a/app/assets/javascripts/vue_shared/components/source_viewer/workers/highlight_utils.js
+++ b/app/assets/javascripts/vue_shared/components/source_viewer/workers/highlight_utils.js
@@ -1,13 +1,35 @@
import hljs from 'highlight.js/lib/core';
-import json from 'highlight.js/lib/languages/json';
+import languageLoader from '~/content_editor/services/highlight_js_language_loader';
import { registerPlugins } from '../plugins/index';
import { LINES_PER_CHUNK, NEWLINE, ROUGE_TO_HLJS_LANGUAGE_MAP } from '../constants';
-const initHighlightJs = (fileType, content, language) => {
- // The Highlight Worker is currently scoped to JSON files.
- // See the following issue for more: https://gitlab.com/gitlab-org/gitlab/-/issues/415753
- hljs.registerLanguage(language, json);
+const loadLanguage = async (language) => {
+ const languageDefinition = await languageLoader[language]();
+ hljs.registerLanguage(language, languageDefinition.default);
+};
+
+const loadSubLanguages = async (languageDefinition) => {
+  // Some files can contain sub-languages (for example, Svelte); this ensures that sub-languages are also loaded
+ if (!languageDefinition?.contains) return;
+
+ // generate list of languages to load
+ const languages = new Set(
+ languageDefinition.contains
+ .filter((component) => Boolean(component.subLanguage))
+ .map((component) => component.subLanguage),
+ );
+
+ if (languageDefinition.subLanguage) {
+ languages.add(languageDefinition.subLanguage);
+ }
+
+ await Promise.all([...languages].map(loadLanguage));
+};
+
+const initHighlightJs = async (fileType, content, language) => {
registerPlugins(hljs, fileType, content, true);
+ await loadLanguage(language);
+ await loadSubLanguages(hljs.getLanguage(language));
};
const splitByLineBreaks = (content = '') => content.split(/\r?\n/);
@@ -35,12 +57,12 @@ const splitIntoChunks = (language, rawContent, highlightedContent) => {
return result;
};
-const highlight = (fileType, rawContent, lang) => {
+const highlight = async (fileType, rawContent, lang) => {
const language = ROUGE_TO_HLJS_LANGUAGE_MAP[lang.toLowerCase()];
let result;
if (language) {
- initHighlightJs(fileType, rawContent, language);
+ await initHighlightJs(fileType, rawContent, language);
const highlightedContent = hljs.highlight(rawContent, { language }).value;
result = splitIntoChunks(language, rawContent, highlightedContent);
}
diff --git a/app/assets/javascripts/vue_shared/components/source_viewer/workers/highlight_worker.js b/app/assets/javascripts/vue_shared/components/source_viewer/workers/highlight_worker.js
index 535e857d7a9..49afaba3d2f 100644
--- a/app/assets/javascripts/vue_shared/components/source_viewer/workers/highlight_worker.js
+++ b/app/assets/javascripts/vue_shared/components/source_viewer/workers/highlight_worker.js
@@ -4,7 +4,7 @@ import { highlight } from './highlight_utils';
* A webworker for highlighting large amounts of content with Highlight.js
*/
// eslint-disable-next-line no-restricted-globals
-self.addEventListener('message', ({ data: { fileType, content, language } }) => {
+self.addEventListener('message', async ({ data: { fileType, content, language } }) => {
// eslint-disable-next-line no-restricted-globals
- self.postMessage(highlight(fileType, content, language));
+ self.postMessage(await highlight(fileType, content, language));
});
diff --git a/app/assets/javascripts/vue_shared/components/user_access_role_badge.vue b/app/assets/javascripts/vue_shared/components/user_access_role_badge.vue
index e5558c038b3..43e35f2b1f0 100644
--- a/app/assets/javascripts/vue_shared/components/user_access_role_badge.vue
+++ b/app/assets/javascripts/vue_shared/components/user_access_role_badge.vue
@@ -12,11 +12,18 @@ export default {
components: {
GlBadge,
},
+ props: {
+ size: {
+ type: String,
+ required: false,
+ default: 'md',
+ },
+ },
};
</script>
<template>
- <gl-badge class="gl-bg-transparent! gl-inset-border-1-gray-100!">
+ <gl-badge :size="size" class="gl-bg-transparent! gl-inset-border-1-gray-100!">
<slot></slot>
</gl-badge>
</template>
diff --git a/app/assets/javascripts/work_items/components/shared/work_item_link_child_contents.vue b/app/assets/javascripts/work_items/components/shared/work_item_link_child_contents.vue
index cbe7de4abcd..503328f7b03 100644
--- a/app/assets/javascripts/work_items/components/shared/work_item_link_child_contents.vue
+++ b/app/assets/javascripts/work_items/components/shared/work_item_link_child_contents.vue
@@ -58,11 +58,6 @@ export default {
default: true,
},
},
- data() {
- return {
- isFocused: false,
- };
- },
computed: {
labels() {
return this.metadataWidgets[WIDGET_TYPE_LABELS]?.labels?.nodes || [];
@@ -117,7 +112,7 @@ export default {
return false;
},
showRemove() {
- return this.canUpdate && this.isFocused;
+ return this.canUpdate;
},
displayLabels() {
return this.showLabels && this.labels.length;
@@ -135,10 +130,6 @@ export default {
<div
class="item-body work-item-link-child gl-relative gl-display-flex gl-flex-grow-1 gl-overflow-break-word gl-min-w-0 gl-pl-3 gl-pr-2 gl-py-2 gl-mx-n2 gl-rounded-base gl-gap-3"
data-testid="links-child"
- @mouseover="isFocused = true"
- @mouseleave="isFocused = false"
- @focusin="isFocused = true"
- @focusout="isFocused = false"
>
<div class="item-contents gl-display-flex gl-flex-grow-1 gl-flex-wrap gl-min-w-0">
<div
@@ -203,12 +194,14 @@ export default {
</div>
<div v-if="canUpdate">
<gl-button
+ v-gl-tooltip
:class="{ 'gl-visibility-visible': showRemove }"
class="gl-visibility-hidden"
category="tertiary"
size="small"
icon="close"
:aria-label="$options.i18n.remove"
+ :title="$options.i18n.remove"
data-testid="remove-work-item-link"
@click="$emit('removeChild', childItem)"
/>
diff --git a/app/assets/javascripts/work_items/components/work_item_links/work_item_link_child.vue b/app/assets/javascripts/work_items/components/work_item_links/work_item_link_child.vue
index 49454c3d9f3..f43718c4cb8 100644
--- a/app/assets/javascripts/work_items/components/work_item_links/work_item_link_child.vue
+++ b/app/assets/javascripts/work_items/components/work_item_links/work_item_link_child.vue
@@ -213,10 +213,10 @@ export default {
</script>
<template>
- <li class="tree-item">
+ <li class="tree-item gl-p-0! gl-border-bottom-0!">
<div
class="gl-display-flex gl-align-items-flex-start"
- :class="{ 'gl-ml-6': canHaveChildren && !hasChildren && hasIndirectChildren }"
+ :class="{ 'gl-ml-5 gl-pl-2': canHaveChildren && !hasChildren && hasIndirectChildren }"
>
<gl-button
v-if="hasChildren"
@@ -227,7 +227,7 @@ export default {
category="tertiary"
size="small"
:loading="isLoadingChildren"
- class="gl-px-0! gl-py-3! gl-mr-3"
+ class="gl-px-0! gl-py-3! gl-mr-2"
data-testid="expand-child"
@click="toggleItem"
/>
diff --git a/app/assets/javascripts/work_items/components/work_item_links/work_item_tree.vue b/app/assets/javascripts/work_items/components/work_item_links/work_item_tree.vue
index 3d09a90169c..09d2e688174 100644
--- a/app/assets/javascripts/work_items/components/work_item_links/work_item_tree.vue
+++ b/app/assets/javascripts/work_items/components/work_item_links/work_item_tree.vue
@@ -146,39 +146,39 @@ export default {
/>
</template>
<template #body>
- <div v-if="!isShownAddForm && children.length === 0" data-testid="tree-empty">
- <div class="gl-new-card-content">
+ <div class="gl-new-card-content">
+ <div v-if="!isShownAddForm && children.length === 0" data-testid="tree-empty">
<p class="gl-new-card-empty">
{{ $options.WORK_ITEMS_TREE_TEXT_MAP[workItemType].empty }}
</p>
</div>
+ <work-item-links-form
+ v-if="isShownAddForm"
+ ref="wiLinksForm"
+ data-testid="add-tree-form"
+ :full-path="fullPath"
+ :issuable-gid="workItemId"
+ :work-item-iid="workItemIid"
+ :form-type="formType"
+ :parent-work-item-type="parentWorkItemType"
+ :children-type="childType"
+ :children-ids="childrenIds"
+ :parent-confidential="confidential"
+ @cancel="hideAddForm"
+ @addChild="$emit('addChild')"
+ />
+ <work-item-children-wrapper
+ :children="children"
+ :can-update="canUpdate"
+ :full-path="fullPath"
+ :work-item-id="workItemId"
+ :work-item-iid="workItemIid"
+ :work-item-type="workItemType"
+ :show-labels="showLabels"
+ @error="error = $event"
+ @show-modal="showModal"
+ />
</div>
- <work-item-links-form
- v-if="isShownAddForm"
- ref="wiLinksForm"
- data-testid="add-tree-form"
- :full-path="fullPath"
- :issuable-gid="workItemId"
- :work-item-iid="workItemIid"
- :form-type="formType"
- :parent-work-item-type="parentWorkItemType"
- :children-type="childType"
- :children-ids="childrenIds"
- :parent-confidential="confidential"
- @cancel="hideAddForm"
- @addChild="$emit('addChild')"
- />
- <work-item-children-wrapper
- :children="children"
- :can-update="canUpdate"
- :full-path="fullPath"
- :work-item-id="workItemId"
- :work-item-iid="workItemIid"
- :work-item-type="workItemType"
- :show-labels="showLabels"
- @error="error = $event"
- @show-modal="showModal"
- />
</template>
</widget-wrapper>
</template>
diff --git a/app/services/metrics_service.rb b/app/services/metrics_service.rb
index f39cc1a8534..d27328f89cd 100644
--- a/app/services/metrics_service.rb
+++ b/app/services/metrics_service.rb
@@ -4,11 +4,7 @@ require 'prometheus/client/formats/text'
class MetricsService
def prometheus_metrics_text
- if Feature.enabled?(:prom_metrics_rust)
- ::Prometheus::Client::Formats::Text.marshal_multiprocess(multiprocess_metrics_path, use_rust: true)
- else
- ::Prometheus::Client::Formats::Text.marshal_multiprocess(multiprocess_metrics_path)
- end
+ ::Prometheus::Client::Formats::Text.marshal_multiprocess(multiprocess_metrics_path)
end
def metrics_text
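
With the `prom_metrics_rust` flag gone, a minimal sketch of the remaining call, assuming prometheus-client-mmap ~> 1.0 and an existing multiprocess metrics directory (the path below is a placeholder, not GitLab's configured value):

```ruby
# Sketch: single-call multiprocess text export with prometheus-client-mmap 1.x,
# which no longer needs the `use_rust: true` toggle shown in the removed branch.
require 'prometheus/client'
require 'prometheus/client/formats/text'

# Placeholder directory holding the per-process metric files.
multiprocess_metrics_path = '/tmp/prometheus_multiproc'

puts Prometheus::Client::Formats::Text.marshal_multiprocess(multiprocess_metrics_path)
```
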
diff --git a/config/feature_flags/development/access_rest_chat.yml b/config/feature_flags/development/access_rest_chat.yml
new file mode 100644
index 00000000000..8a280ef3485
--- /dev/null
+++ b/config/feature_flags/development/access_rest_chat.yml
@@ -0,0 +1,8 @@
+---
+name: access_rest_chat
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/133015
+rollout_issue_url:
+milestone: '16.5'
+type: development
+group: group::ai framework
+default_enabled: false
diff --git a/config/feature_flags/development/prom_metrics_rust.yml b/config/feature_flags/development/prom_metrics_rust.yml
deleted file mode 100644
index 3b4a4e23a08..00000000000
--- a/config/feature_flags/development/prom_metrics_rust.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: prom_metrics_rust
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/119005
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/409023
-milestone: '16.0'
-type: development
-group: group::gitaly::cluster
-default_enabled: false
diff --git a/config/initializers/7_redis.rb b/config/initializers/7_redis.rb
index d992cc3a58c..25c2c6aa11f 100644
--- a/config/initializers/7_redis.rb
+++ b/config/initializers/7_redis.rb
@@ -27,9 +27,6 @@ Redis::Cluster::SlotLoader.prepend(Gitlab::Patch::SlotLoader)
Redis::Cluster::CommandLoader.prepend(Gitlab::Patch::CommandLoader)
Redis::Cluster.prepend(Gitlab::Patch::RedisCluster)
-# this only instruments `RedisClient` used in `Sidekiq.redis`
-RedisClient.register(Gitlab::Instrumentation::RedisClientMiddleware)
-
if Gitlab::Redis::Workhorse.params[:cluster].present?
raise "Do not configure workhorse with a Redis Cluster as pub/sub commands are not cluster-compatible."
end
diff --git a/config/initializers/sidekiq.rb b/config/initializers/sidekiq.rb
index 3e2087f717a..8df12671f26 100644
--- a/config/initializers/sidekiq.rb
+++ b/config/initializers/sidekiq.rb
@@ -28,25 +28,21 @@ def enable_semi_reliable_fetch_mode?
end
# Custom Queues configuration
-queues_config_hash = Gitlab::Redis::Queues.redis_client_params
+queues_config_hash = Gitlab::Redis::Queues.params
enable_json_logs = Gitlab.config.sidekiq.log_format != 'text'
-# Sidekiq's `strict_args!` raises an exception by default in 7.0
-# https://github.com/sidekiq/sidekiq/blob/31bceff64e10d501323bc06ac0552652a47c082e/docs/7.0-Upgrade.md?plain=1#L59
-Sidekiq.strict_args!(false)
-
Sidekiq.configure_server do |config|
config[:strict] = false
config[:queues] = Gitlab::SidekiqConfig.expand_queues(config[:queues])
if enable_json_logs
- config.logger.formatter = Gitlab::SidekiqLogging::JSONFormatter.new
+ config.log_formatter = Gitlab::SidekiqLogging::JSONFormatter.new
config[:job_logger] = Gitlab::SidekiqLogging::StructuredLogger
# Remove the default-provided handler. The exception is logged inside
# Gitlab::SidekiqLogging::StructuredLogger
- config.error_handlers.delete(Sidekiq::Config::ERROR_HANDLER)
+ config.error_handlers.delete(Sidekiq::DEFAULT_ERROR_HANDLER)
end
Sidekiq.logger.info "Listening on queues #{config[:queues].uniq.sort}"
@@ -111,8 +107,8 @@ Sidekiq.configure_client do |config|
# We only need to do this for other clients. If Sidekiq-server is the
# client scheduling jobs, we have access to the regular sidekiq logger that
# writes to STDOUT
- config.logger = Gitlab::SidekiqLogging::ClientLogger.build
- config.logger.formatter = Gitlab::SidekiqLogging::JSONFormatter.new if enable_json_logs
+ Sidekiq.logger = Gitlab::SidekiqLogging::ClientLogger.build
+ Sidekiq.logger.formatter = Gitlab::SidekiqLogging::JSONFormatter.new if enable_json_logs
config.client_middleware(&Gitlab::SidekiqMiddleware.client_configurator)
end
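
For context, a rough sketch of the Sidekiq 6.5 configuration API this initializer returns to (assumes sidekiq ~> 6.5; the stdlib formatter stands in for Gitlab::SidekiqLogging::JSONFormatter):

```ruby
require 'logger'
require 'sidekiq'

Sidekiq.configure_server do |config|
  # In 6.x the yielded config is the Sidekiq module itself.
  config[:strict] = false                       # keep weighted queue ordering
  config.log_formatter = Logger::Formatter.new  # stand-in for a JSON formatter
end

# Runtime options are read straight off the Sidekiq module in 6.5, rather than
# Sidekiq.default_configuration as in 7.x.
puts Sidekiq[:timeout]      # shutdown grace period, used by sidekiq_cluster.rb
puts Sidekiq[:concurrency]  # worker thread count, used by Gitlab::Runtime
```
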
diff --git a/config/initializers/sidekiq_cluster.rb b/config/initializers/sidekiq_cluster.rb
index 4773152d912..5851e3bd838 100644
--- a/config/initializers/sidekiq_cluster.rb
+++ b/config/initializers/sidekiq_cluster.rb
@@ -19,7 +19,7 @@ if ENV['ENABLE_SIDEKIQ_CLUSTER']
# Allow sidekiq to cleanly terminate and push any running jobs back
# into the queue. We use the configured timeout and add a small
# grace period
- sleep(Sidekiq.default_configuration[:timeout] + 5)
+ sleep(Sidekiq[:timeout] + 5)
# Signaling the Sidekiq Pgroup as KILL is not forwarded to
# a possible child process. In Sidekiq Cluster, all child Sidekiq
diff --git a/config/webpack.config.js b/config/webpack.config.js
index 438c8ec2f07..038595df1bd 100644
--- a/config/webpack.config.js
+++ b/config/webpack.config.js
@@ -369,8 +369,7 @@ module.exports = {
{
loader: 'worker-loader',
options: {
- name: '[name].[contenthash:8].worker.js',
- inline: IS_DEV_SERVER,
+ filename: '[name].[contenthash:8].worker.js',
},
},
'babel-loader',
diff --git a/db/migrate/20231120071202_remove_not_null_constraint_from_member_role_column.rb b/db/migrate/20231120071202_remove_not_null_constraint_from_member_role_column.rb
new file mode 100644
index 00000000000..7acf26d16f5
--- /dev/null
+++ b/db/migrate/20231120071202_remove_not_null_constraint_from_member_role_column.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+class RemoveNotNullConstraintFromMemberRoleColumn < Gitlab::Database::Migration[2.2]
+ milestone '16.7'
+ disable_ddl_transaction!
+
+ def up
+ change_column_null :member_roles, :namespace_id, true
+ end
+
+ def down
+ change_column_null :member_roles, :namespace_id, false
+ end
+end
diff --git a/db/schema_migrations/20231120071202 b/db/schema_migrations/20231120071202
new file mode 100644
index 00000000000..0e665d356c8
--- /dev/null
+++ b/db/schema_migrations/20231120071202
@@ -0,0 +1 @@
+0f5d6648e45d82c4db18a8886ec04f5af73b8d3d7cc92b90119027d5a6fc8f80
\ No newline at end of file
diff --git a/doc/administration/logs/index.md b/doc/administration/logs/index.md
index b760f0b6217..b32d19f174f 100644
--- a/doc/administration/logs/index.md
+++ b/doc/administration/logs/index.md
@@ -985,8 +985,15 @@ For example:
## `geo.log` **(PREMIUM SELF)**
-Geo stores structured log messages in a `geo.log` file. For Linux package
-installations, this file is at `/var/log/gitlab/gitlab-rails/geo.log`.
+Geo stores structured log messages in a `geo.log` file. For Linux package installations,
+this file is at `/var/log/gitlab/gitlab-rails/geo.log`.
+
+For Helm chart installations, it's stored in the Sidekiq pod, at `/var/log/gitlab/geo.log`.
+It can be read either by directly accessing the file or by using `kubectl` to fetch the Sidekiq logs and then filtering the results by `"subcomponent"=="geo"`. The following example uses `jq` to select only Geo logs:
+
+```shell
+kubectl logs -l app=sidekiq --max-log-requests=50 | jq 'select(."subcomponent"=="geo")'
+```
This file contains information about when Geo attempts to sync repositories
and files. Each line in the file contains a separate JSON entry that can be
diff --git a/doc/administration/nfs.md b/doc/administration/nfs.md
index 119f757bfbc..c3ea1baf954 100644
--- a/doc/administration/nfs.md
+++ b/doc/administration/nfs.md
@@ -258,7 +258,6 @@ following are the 4 locations need to be shared:
| Location | Description | Default configuration |
| -------- | ----------- | --------------------- |
-| `/var/opt/gitlab/git-data` | Git repository data. This accounts for a large portion of your data | `git_data_dirs({"default" => { "path" => "/var/opt/gitlab/git-data"} })`
| `/var/opt/gitlab/gitlab-rails/uploads` | User uploaded attachments | `gitlab_rails['uploads_directory'] = '/var/opt/gitlab/gitlab-rails/uploads'`
| `/var/opt/gitlab/gitlab-rails/shared` | Objects such as build artifacts, GitLab Pages, LFS objects, and temp files. If you're using LFS this may also account for a large portion of your data | `gitlab_rails['shared_path'] = '/var/opt/gitlab/gitlab-rails/shared'`
| `/var/opt/gitlab/gitlab-ci/builds` | GitLab CI/CD build traces | `gitlab_ci['builds_directory'] = '/var/opt/gitlab/gitlab-ci/builds'`
diff --git a/doc/administration/postgresql/replication_and_failover.md b/doc/administration/postgresql/replication_and_failover.md
index 8403177952a..5c438fa65b8 100644
--- a/doc/administration/postgresql/replication_and_failover.md
+++ b/doc/administration/postgresql/replication_and_failover.md
@@ -1309,6 +1309,18 @@ postgresql['trust_auth_cidr_addresses'] = %w(123.123.123.123/32 <other_cidrs>)
[Reconfigure GitLab](../restart_gitlab.md#reconfigure-a-linux-package-installation) for the changes to take effect.
+### PgBouncer errors `Error running command: GitlabCtl::Errors::ExecutionError` and `ERROR: database gitlabhq_production is not paused`
+
+In GitLab versions earlier than 16.5.0, the automatic failover of PgBouncer does not
+happen after a Patroni switchover. GitLab fails to detect the paused database, then
+attempts to `RESUME` a database that is not paused:
+
+```plaintext
+INFO -- : Running: gitlab-ctl pgb-notify --pg-database gitlabhq_production --newhost database7.example.com --user pgbouncer --hostuser gitlab-consul
+ERROR -- : STDERR: Error running command: GitlabCtl::Errors::ExecutionError
+ERROR -- : STDERR: ERROR: ERROR: database gitlabhq_production is not paused
+```
+
### Reinitialize a replica
If a replica cannot start or rejoin the cluster, or when it lags behind and cannot catch up, it might be necessary to reinitialize the replica:
diff --git a/doc/subscriptions/customers_portal.md b/doc/subscriptions/customers_portal.md
index 45a4324f45d..ed587070ee7 100644
--- a/doc/subscriptions/customers_portal.md
+++ b/doc/subscriptions/customers_portal.md
@@ -53,9 +53,8 @@ The account owner's personal details are used on invoices. The account owner's e
To change account owner information, including name, billing address, and email address:
1. Sign in to the [Customers Portal](https://customers.gitlab.com/customers/sign_in).
-1. Select **My account > Account details**.
-1. Expand the **Personal details** section.
-1. Edit the personal details.
+1. Select **My profile > Profile settings**.
+1. Edit **Your personal details**.
1. Select **Save changes**.
If you want to transfer ownership of the Customers Portal account
@@ -68,9 +67,8 @@ to another person, after you enter that person's personal details, you must also
To change your company details, including company name and VAT number:
1. Sign in to the [Customers Portal](https://customers.gitlab.com/customers/sign_in).
-1. Select **My account > Account details**.
-1. Expand the **Company details** section.
-1. Edit the company details.
+1. Select **My profile > Profile settings**.
+1. Edit **Your company details**.
1. Select **Save changes**.
## Change your payment method
@@ -85,7 +83,7 @@ If you would like to use an alternative method to pay, please
To change your payment method:
1. Sign in to the [Customers Portal](https://customers.gitlab.com/customers/sign_in).
-1. Select **My account > Payment methods**.
+1. Select **My profile > Payment methods**.
1. **Edit** an existing payment method's information or **Add new payment method**.
1. Select **Save Changes**.
@@ -95,7 +93,7 @@ Automatic renewal of a subscription is charged to your default payment method. T
method as the default:
1. Sign in to the [Customers Portal](https://customers.gitlab.com/customers/sign_in).
-1. Select **My account > Payment methods**.
+1. Select **My profile > Payment methods**.
1. **Edit** the selected payment method and check the **Make default payment method** checkbox.
1. Select **Save Changes**.
@@ -106,7 +104,7 @@ Follow this guideline if you have a legacy Customers Portal account and use an e
To link a GitLab.com account to your Customers Portal account:
1. Sign in to the [Customers Portal](https://customers.gitlab.com/customers/sign_in?legacy=true) using email and password.
-1. On the Customers Portal page, select **My account > Account details**.
+1. On the Customers Portal page, select **My profile > Profile settings**.
1. Under **Your GitLab.com account**, select **Link account**.
1. Sign in to the [GitLab.com](https://gitlab.com/users/sign_in) account you want to link to the Customers Portal account.
@@ -120,7 +118,7 @@ To change the GitLab.com account linked to your Customers Portal account:
1. Sign in to the [Customers Portal](https://customers.gitlab.com/customers/sign_in).
1. In a separate browser tab, go to [GitLab.com](https://gitlab.com/users/sign_in) and ensure you are not logged in.
-1. On the Customers Portal page, select **My account > Account details**.
+1. On the Customers Portal page, select **My profile > Profile settings**.
1. Under **Your GitLab.com account**, select **Change linked account**.
1. Sign in to the [GitLab.com](https://gitlab.com/users/sign_in) account you want to link to the Customers Portal account.
diff --git a/doc/user/project/import/bitbucket.md b/doc/user/project/import/bitbucket.md
index 94444b29542..4556ac76668 100644
--- a/doc/user/project/import/bitbucket.md
+++ b/doc/user/project/import/bitbucket.md
@@ -37,18 +37,6 @@ The Bitbucket Cloud importer works only with [Bitbucket.org](https://bitbucket.o
Server (aka Stash). If you are trying to import projects from Bitbucket Server, use
[the Bitbucket Server importer](bitbucket_server.md).
-## Prerequisites
-
-> Requirement for Maintainer role instead of Developer role introduced in GitLab 16.0 and backported to GitLab 15.11.1 and GitLab 15.10.5.
-
-- [Bitbucket Cloud integration](../../../integration/bitbucket.md) must be enabled. If that integration is not enabled, ask your GitLab administrator
- to enable it. The Bitbucket Cloud integration is enabled by default on GitLab.com.
-- [Bitbucket Cloud import source](../../../administration/settings/import_and_export_settings.md#configure-allowed-import-sources) must be enabled. If not enabled, ask your
- GitLab administrator to enable it. The Bitbucket Cloud import source is enabled by default on GitLab.com.
-- At least the Maintainer role on the destination group to import to.
-
-## How it works
-
When issues/pull requests are being imported, the Bitbucket importer uses the Bitbucket nickname of
the author/assignee and tries to find the same Bitbucket identity in GitLab. If they don't match or
the user is not found in the GitLab database, the project creator (most of the times the current
@@ -59,7 +47,19 @@ The importer creates any new namespaces (groups) if they don't exist or in
the case the namespace is taken, the repository is imported under the user's
namespace that started the import process.
-## Requirements for user-mapped contributions
+## Prerequisites
+
+> Requirement for Maintainer role instead of Developer role introduced in GitLab 16.0 and backported to GitLab 15.11.1 and GitLab 15.10.5.
+
+- [Bitbucket Cloud integration](../../../integration/bitbucket.md) must be enabled. If that integration is not enabled, ask your GitLab administrator
+ to enable it. The Bitbucket Cloud integration is enabled by default on GitLab.com.
+- [Bitbucket Cloud import source](../../../administration/settings/import_and_export_settings.md#configure-allowed-import-sources) must be enabled. If not enabled, ask your
+ GitLab administrator to enable it. The Bitbucket Cloud import source is enabled by default on GitLab.com.
+- At least the Maintainer role on the destination group to import to.
+- Pull requests in Bitbucket must have the same source and destination project and not be from a fork of a project.
+ Otherwise, the pull requests are imported as empty merge requests.
+
+### Requirements for user-mapped contributions
For user contributions to be mapped, each user must complete the following before the project import:
diff --git a/lib/gitlab/instrumentation/redis_client_middleware.rb b/lib/gitlab/instrumentation/redis_client_middleware.rb
deleted file mode 100644
index 39f6bda4304..00000000000
--- a/lib/gitlab/instrumentation/redis_client_middleware.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-# This module references https://github.com/redis-rb/redis-client#instrumentation-and-middlewares
-# implementing `call`, and `call_pipelined`.
-module Gitlab
- module Instrumentation
- module RedisClientMiddleware
- include RedisHelper
-
- def call(command, redis_config)
- instrumentation = instrumentation_class(redis_config)
-
- result = instrument_call([command], instrumentation) do
- super
- end
-
- measure_io(command, result, instrumentation) if ::RequestStore.active?
-
- result
- end
-
- def call_pipelined(commands, redis_config)
- instrumentation = instrumentation_class(redis_config)
-
- result = instrument_call(commands, instrumentation, true) do
- super
- end
-
- measure_io(commands, result, instrumentation) if ::RequestStore.active?
-
- result
- end
-
- private
-
- def measure_io(command, result, instrumentation)
- measure_write_size(command, instrumentation)
- measure_read_size(result, instrumentation)
- end
-
- def instrumentation_class(config)
- config.custom[:instrumentation_class]
- end
- end
- end
-end
diff --git a/lib/gitlab/instrumentation/redis_helper.rb b/lib/gitlab/instrumentation/redis_helper.rb
index 392a7ebe852..ba1c8132250 100644
--- a/lib/gitlab/instrumentation/redis_helper.rb
+++ b/lib/gitlab/instrumentation/redis_helper.rb
@@ -15,7 +15,7 @@ module Gitlab
end
yield
- rescue ::Redis::BaseError, ::RedisClient::Error => ex
+ rescue ::Redis::BaseError => ex
if ex.message.start_with?('MOVED', 'ASK')
instrumentation_class.instance_count_cluster_redirection(ex)
else
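
With the redis-client gem gone, only redis-rb exceptions are rescued. A self-contained sketch of the MOVED/ASK counting pattern the helper keeps (the stats hash is a stand-in for GitLab's instrumentation class):

```ruby
require 'redis'

# Count cluster redirections separately from other Redis errors, then re-raise.
def instrument_call(stats)
  yield
rescue ::Redis::BaseError => ex
  if ex.message.start_with?('MOVED', 'ASK')
    stats[:cluster_redirections] += 1
  else
    stats[:exceptions] += 1
  end
  raise
end

stats = Hash.new(0)
begin
  instrument_call(stats) { raise Redis::CommandError, 'MOVED 3999 127.0.0.1:6381' }
rescue Redis::CommandError
  # re-raised after being counted
end
puts stats.inspect # => {:cluster_redirections=>1}
```
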
diff --git a/lib/gitlab/memory/watchdog/handlers/sidekiq_handler.rb b/lib/gitlab/memory/watchdog/handlers/sidekiq_handler.rb
index 9da662d5f1b..47ed608c576 100644
--- a/lib/gitlab/memory/watchdog/handlers/sidekiq_handler.rb
+++ b/lib/gitlab/memory/watchdog/handlers/sidekiq_handler.rb
@@ -18,8 +18,8 @@ module Gitlab
return true unless @alive
# Tell sidekiq to restart itself
- # Keep extra safe to wait `Sidekiq.default_configuration[:timeout] + 2` seconds before SIGKILL
- send_signal(:TERM, $$, 'gracefully shut down', Sidekiq.default_configuration[:timeout] + 2)
+ # Keep extra safe to wait `Sidekiq[:timeout] + 2` seconds before SIGKILL
+ send_signal(:TERM, $$, 'gracefully shut down', Sidekiq[:timeout] + 2)
return true unless @alive
# Ideally we should never reach this condition
diff --git a/lib/gitlab/patch/sidekiq_cron_poller.rb b/lib/gitlab/patch/sidekiq_cron_poller.rb
index a7de03aa969..8f1fbf53161 100644
--- a/lib/gitlab/patch/sidekiq_cron_poller.rb
+++ b/lib/gitlab/patch/sidekiq_cron_poller.rb
@@ -7,11 +7,11 @@
require 'sidekiq/version'
require 'sidekiq/cron/version'
-if Gem::Version.new(Sidekiq::VERSION) != Gem::Version.new('7.1.6')
+if Gem::Version.new(Sidekiq::VERSION) != Gem::Version.new('6.5.12')
raise 'New version of sidekiq detected, please remove or update this patch'
end
-if Gem::Version.new(Sidekiq::Cron::VERSION) != Gem::Version.new('1.9.1')
+if Gem::Version.new(Sidekiq::Cron::VERSION) != Gem::Version.new('1.8.0')
raise 'New version of sidekiq-cron detected, please remove or update this patch'
end
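
The same guard, sketched generically for the versions this commit pins (gem names and the error message mirror the patch above; adjust the pins whenever the gems move):

```ruby
require 'sidekiq/version'
require 'sidekiq/cron/version'

# Pin the monkey-patch to the exact gem versions it was written against so an
# unnoticed upgrade fails loudly at boot.
EXPECTED_VERSIONS = {
  'sidekiq' => ['6.5.12', Sidekiq::VERSION],
  'sidekiq-cron' => ['1.8.0', Sidekiq::Cron::VERSION]
}.freeze

EXPECTED_VERSIONS.each do |gem_name, (expected, actual)|
  next if Gem::Version.new(actual) == Gem::Version.new(expected)

  raise "New version of #{gem_name} detected (#{actual}), please remove or update this patch"
end
```
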
diff --git a/lib/gitlab/redis/wrapper.rb b/lib/gitlab/redis/wrapper.rb
index 8991218de98..401ac50509d 100644
--- a/lib/gitlab/redis/wrapper.rb
+++ b/lib/gitlab/redis/wrapper.rb
@@ -19,7 +19,7 @@ module Gitlab
InvalidPathError = Class.new(StandardError)
class << self
- delegate :params, :url, :store, :encrypted_secrets, :redis_client_params, to: :new
+ delegate :params, :url, :store, :encrypted_secrets, to: :new
def with
pool.with { |redis| yield redis }
@@ -96,27 +96,6 @@ module Gitlab
redis_store_options
end
- # redis_client_params modifies redis_store_options to be compatible with redis-client
- # TODO: when redis-rb is updated to v5, there is no need to support 2 types of config format
- def redis_client_params
- options = redis_store_options
- options[:custom] = { instrumentation_class: options[:instrumentation_class] }
-
- # TODO: add support for cluster when upgrading to redis-rb v5.y.z we do not need cluster support
- # as Sidekiq workload should not and does not run in a Redis Cluster
- # support to be added in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/134862
- if options[:sentinels]
- # name is required in RedisClient::SentinelConfig
- # https://github.com/redis-rb/redis-client/blob/1ab081c1d0e47df5d55e011c9390c70b2eef6731/lib/redis_client/sentinel_config.rb#L17
- options[:name] = options[:host]
- options.except(:scheme, :instrumentation_class, :host, :port)
- else
- # remove disallowed keys as seen in
- # https://github.com/redis-rb/redis-client/blob/1ab081c1d0e47df5d55e011c9390c70b2eef6731/lib/redis_client/config.rb#L21
- options.except(:scheme, :instrumentation_class)
- end
- end
-
def url
raw_config_hash[:url]
end
diff --git a/lib/gitlab/runtime.rb b/lib/gitlab/runtime.rb
index 1f836564d47..269fb74ceca 100644
--- a/lib/gitlab/runtime.rb
+++ b/lib/gitlab/runtime.rb
@@ -94,7 +94,7 @@ module Gitlab
#
# These threads execute Sidekiq client middleware when jobs
# are enqueued and those can access DB / Redis.
- threads += Sidekiq.default_configuration[:concurrency] + 2
+ threads += Sidekiq[:concurrency] + 2
end
if puma?
diff --git a/lib/gitlab/sidekiq_config.rb b/lib/gitlab/sidekiq_config.rb
index 62fd046981f..33a15d95d22 100644
--- a/lib/gitlab/sidekiq_config.rb
+++ b/lib/gitlab/sidekiq_config.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
require 'yaml'
-require 'sidekiq/capsule'
module Gitlab
module SidekiqConfig
@@ -162,7 +161,7 @@ module Gitlab
# the current Sidekiq process
def current_worker_queue_mappings
worker_queue_mappings
- .select { |worker, queue| Sidekiq.default_configuration.queues.include?(queue) }
+ .select { |worker, queue| Sidekiq[:queues].include?(queue) }
.to_h
end
diff --git a/lib/gitlab/sidekiq_logging/structured_logger.rb b/lib/gitlab/sidekiq_logging/structured_logger.rb
index 4754417639f..c65d9c5ddd5 100644
--- a/lib/gitlab/sidekiq_logging/structured_logger.rb
+++ b/lib/gitlab/sidekiq_logging/structured_logger.rb
@@ -16,11 +16,11 @@ module Gitlab
ActiveRecord::LogSubscriber.reset_runtime
- @logger.info log_job_start(job, base_payload)
+ Sidekiq.logger.info log_job_start(job, base_payload)
yield
- @logger.info log_job_done(job, started_time, base_payload)
+ Sidekiq.logger.info log_job_done(job, started_time, base_payload)
rescue Sidekiq::JobRetry::Handled => job_exception
# Sidekiq::JobRetry::Handled is raised by the internal Sidekiq
# processor. It is a wrapper around real exception indicating an
@@ -29,11 +29,11 @@ module Gitlab
#
# For more information:
# https://github.com/mperham/sidekiq/blob/v5.2.7/lib/sidekiq/processor.rb#L173
- @logger.warn log_job_done(job, started_time, base_payload, job_exception.cause || job_exception)
+ Sidekiq.logger.warn log_job_done(job, started_time, base_payload, job_exception.cause || job_exception)
raise
rescue StandardError => job_exception
- @logger.warn log_job_done(job, started_time, base_payload, job_exception)
+ Sidekiq.logger.warn log_job_done(job, started_time, base_payload, job_exception)
raise
end
diff --git a/lib/gitlab/sidekiq_middleware/server_metrics.rb b/lib/gitlab/sidekiq_middleware/server_metrics.rb
index e65761fc1b6..37a9ed37891 100644
--- a/lib/gitlab/sidekiq_middleware/server_metrics.rb
+++ b/lib/gitlab/sidekiq_middleware/server_metrics.rb
@@ -64,7 +64,7 @@ module Gitlab
def initialize_process_metrics
metrics = self.metrics
- metrics[:sidekiq_concurrency].set({}, Sidekiq.default_configuration[:concurrency].to_i)
+ metrics[:sidekiq_concurrency].set({}, Sidekiq[:concurrency].to_i)
return unless ::Feature.enabled?(:sidekiq_job_completion_metric_initialize)
diff --git a/lib/gitlab/sidekiq_migrate_jobs.rb b/lib/gitlab/sidekiq_migrate_jobs.rb
index cf4893b8745..2467dd7ca43 100644
--- a/lib/gitlab/sidekiq_migrate_jobs.rb
+++ b/lib/gitlab/sidekiq_migrate_jobs.rb
@@ -16,14 +16,17 @@ module Gitlab
# Migrate jobs in SortedSets, i.e. scheduled and retry sets.
def migrate_set(sidekiq_set)
source_queues_regex = Regexp.union(mappings.keys)
+ cursor = 0
scanned = 0
migrated = 0
estimated_size = Sidekiq.redis { |c| c.zcard(sidekiq_set) }
logger&.info("Processing #{sidekiq_set} set. Estimated size: #{estimated_size}.")
- Sidekiq.redis do |c|
- c.zscan(sidekiq_set) do |job, score|
+ begin
+ cursor, jobs = Sidekiq.redis { |c| c.zscan(sidekiq_set, cursor) }
+
+ jobs.each do |(job, score)|
if scanned > 0 && scanned % LOG_FREQUENCY == 0
logger&.info("In progress. Scanned records: #{scanned}. Migrated records: #{migrated}.")
end
@@ -42,7 +45,7 @@ module Gitlab
migrated += migrate_job_in_set(sidekiq_set, job, score, job_hash)
end
- end
+ end while cursor.to_i != 0
logger&.info("Done. Scanned records: #{scanned}. Migrated records: #{migrated}.")
@@ -58,7 +61,7 @@ module Gitlab
logger&.info("List of queues based on routing rules: #{routing_rules_queues}")
Sidekiq.redis do |conn|
# Redis 6 supports conn.scan_each(match: "queue:*", type: 'list')
- conn.scan("MATCH", "queue:*") do |key|
+ conn.scan_each(match: "queue:*") do |key|
# Redis 5 compatibility
next unless conn.type(key) == 'list'
@@ -98,9 +101,13 @@ module Gitlab
Sidekiq.redis do |connection|
removed = connection.zrem(sidekiq_set, job)
- connection.zadd(sidekiq_set, score, Gitlab::Json.dump(job_hash)) if removed > 0
+ if removed
+ connection.zadd(sidekiq_set, score, Gitlab::Json.dump(job_hash))
- removed
+ 1
+ else
+ 0
+ end
end
end
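The migrate_set change above replaces block-form zscan with explicit cursor handling. A minimal sketch of cursor-based ZSCAN with redis-rb 4.x, where redis and set_name are assumed to exist:

cursor = 0
begin
  # zscan returns [next_cursor, [[member, score], ...]]
  cursor, entries = redis.zscan(set_name, cursor)
  entries.each do |member, score|
    # inspect or migrate each member here
  end
end while cursor.to_i != 0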
diff --git a/package.json b/package.json
index 1bd3a9de9cb..db0baebaf99 100644
--- a/package.json
+++ b/package.json
@@ -225,7 +225,7 @@
"webpack-bundle-analyzer": "^4.9.1",
"webpack-cli": "^4.10.0",
"webpack-stats-plugin": "^0.3.1",
- "worker-loader": "^2.0.0",
+ "worker-loader": "^3.0.8",
"xterm": "3.14.5",
"yaml": "^2.0.0-10"
},
diff --git a/spec/frontend/repository/mixins/highlight_mixin_spec.js b/spec/frontend/repository/mixins/highlight_mixin_spec.js
index 50cfd71d686..8d9317b3a87 100644
--- a/spec/frontend/repository/mixins/highlight_mixin_spec.js
+++ b/spec/frontend/repository/mixins/highlight_mixin_spec.js
@@ -1,7 +1,11 @@
import { shallowMount } from '@vue/test-utils';
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
import { splitIntoChunks } from '~/vue_shared/components/source_viewer/workers/highlight_utils';
import highlightMixin from '~/repository/mixins/highlight_mixin';
import LineHighlighter from '~/blob/line_highlighter';
+import waitForPromises from 'helpers/wait_for_promises';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { TEXT_FILE_TYPE } from '~/repository/constants';
import { LINES_PER_CHUNK } from '~/vue_shared/components/source_viewer/constants';
@@ -11,6 +15,7 @@ jest.mock('~/vue_shared/components/source_viewer/workers/highlight_utils', () =>
splitIntoChunks: jest.fn().mockResolvedValue([]),
}));
+const mockAxios = new MockAdapter(axios);
const workerMock = { postMessage: jest.fn() };
const onErrorMock = jest.fn();
@@ -21,7 +26,10 @@ describe('HighlightMixin', () => {
const rawTextBlob = contentArray.join('\n');
const languageMock = 'json';
- const createComponent = ({ fileType = TEXT_FILE_TYPE, language = languageMock } = {}) => {
+ const createComponent = (
+ { fileType = TEXT_FILE_TYPE, language = languageMock, externalStorageUrl, rawPath } = {},
+ isUsingLfs = false,
+ ) => {
const simpleViewer = { fileType };
const dummyComponent = {
@@ -32,7 +40,10 @@ describe('HighlightMixin', () => {
},
template: '<div>{{chunks[0]?.highlightedContent}}</div>',
created() {
- this.initHighlightWorker({ rawTextBlob, simpleViewer, language, fileType });
+ this.initHighlightWorker(
+ { rawTextBlob, simpleViewer, language, fileType, externalStorageUrl, rawPath },
+ isUsingLfs,
+ );
},
methods: { onError: onErrorMock },
};
@@ -45,13 +56,6 @@ describe('HighlightMixin', () => {
describe('initHighlightWorker', () => {
const firstSeventyLines = contentArray.slice(0, LINES_PER_CHUNK).join('\n');
- it('does not instruct worker if file is not a JSON file', () => {
- workerMock.postMessage.mockClear();
- createComponent({ language: 'javascript' });
-
- expect(workerMock.postMessage).not.toHaveBeenCalled();
- });
-
it('generates a chunk for the first 70 lines of raw text', () => {
expect(splitIntoChunks).toHaveBeenCalledWith(languageMock, firstSeventyLines);
});
@@ -87,4 +91,32 @@ describe('HighlightMixin', () => {
expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(hash);
});
});
+
+ describe('LFS blobs', () => {
+ const rawPath = '/org/project/-/raw/file.xml';
+ const externalStorageUrl = 'http://127.0.0.1:9000/lfs-objects/91/12/1341234';
+ const mockParams = { content: rawTextBlob, language: languageMock, fileType: TEXT_FILE_TYPE };
+
+ afterEach(() => mockAxios.reset());
+
+ it('Uses externalStorageUrl to fetch content if present', async () => {
+ mockAxios.onGet(externalStorageUrl).replyOnce(HTTP_STATUS_OK, rawTextBlob);
+ createComponent({ rawPath, externalStorageUrl }, true);
+ await waitForPromises();
+
+ expect(mockAxios.history.get).toHaveLength(1);
+ expect(mockAxios.history.get[0].url).toBe(externalStorageUrl);
+ expect(workerMock.postMessage).toHaveBeenCalledWith(mockParams);
+ });
+
+ it('Falls back to rawPath to fetch content', async () => {
+ mockAxios.onGet(rawPath).replyOnce(HTTP_STATUS_OK, rawTextBlob);
+ createComponent({ rawPath }, true);
+ await waitForPromises();
+
+ expect(mockAxios.history.get).toHaveLength(1);
+ expect(mockAxios.history.get[0].url).toBe(rawPath);
+ expect(workerMock.postMessage).toHaveBeenCalledWith(mockParams);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js b/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js
index 49e3083f8ed..c84a39274f8 100644
--- a/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js
@@ -6,6 +6,7 @@ import { LINES_PER_CHUNK, NEWLINE } from '~/vue_shared/components/source_viewer/
jest.mock('highlight.js/lib/core', () => ({
highlight: jest.fn().mockReturnValue({ value: 'highlighted content' }),
registerLanguage: jest.fn(),
+ getLanguage: jest.fn(),
}));
jest.mock('~/vue_shared/components/source_viewer/plugins/index', () => ({
@@ -28,11 +29,37 @@ describe('Highlight utility', () => {
expect(registerPlugins).toHaveBeenCalled();
});
+ describe('sub-languages', () => {
+ const languageDefinition = {
+ subLanguage: 'xml',
+ contains: [{ subLanguage: 'javascript' }, { subLanguage: 'typescript' }],
+ };
+
+ beforeEach(async () => {
+ jest.spyOn(hljs, 'getLanguage').mockReturnValue(languageDefinition);
+ await highlight(fileType, rawContent, language);
+ });
+
+ it('registers the primary sub-language', () => {
+ expect(hljs.registerLanguage).toHaveBeenCalledWith(
+ languageDefinition.subLanguage,
+ expect.any(Function),
+ );
+ });
+
+ it.each(languageDefinition.contains)(
+ 'registers the rest of the sub-languages',
+ ({ subLanguage }) => {
+ expect(hljs.registerLanguage).toHaveBeenCalledWith(subLanguage, expect.any(Function));
+ },
+ );
+ });
+
it('highlights the content', () => {
expect(hljs.highlight).toHaveBeenCalledWith(rawContent, { language });
});
- it('splits the content into chunks', () => {
+ it('splits the content into chunks', async () => {
const contentArray = Array.from({ length: 140 }, () => 'newline'); // simulate 140 lines of code
const chunks = [
@@ -52,7 +79,7 @@ describe('Highlight utility', () => {
},
];
- expect(highlight(fileType, contentArray.join(NEWLINE), language)).toEqual(
+ expect(await highlight(fileType, contentArray.join(NEWLINE), language)).toEqual(
expect.arrayContaining(chunks),
);
});
@@ -71,7 +98,7 @@ describe('unsupported languages', () => {
expect(hljs.highlight).not.toHaveBeenCalled();
});
- it('does not return a result', () => {
- expect(highlight(fileType, rawContent, unsupportedLanguage)).toBe(undefined);
+ it('does not return a result', async () => {
+ expect(await highlight(fileType, rawContent, unsupportedLanguage)).toBe(undefined);
});
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
index ee7164515f6..58d5a1a63ba 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
@@ -1,11 +1,15 @@
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { setHTMLFixture } from 'helpers/fixtures';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer_new.vue';
import Chunk from '~/vue_shared/components/source_viewer/components/chunk_new.vue';
-import { EVENT_ACTION, EVENT_LABEL_VIEWER } from '~/vue_shared/components/source_viewer/constants';
+import {
+ EVENT_ACTION,
+ EVENT_LABEL_VIEWER,
+ CODEOWNERS_FILE_NAME,
+} from '~/vue_shared/components/source_viewer/constants';
import Tracking from '~/tracking';
import LineHighlighter from '~/blob/line_highlighter';
import addBlobLinksTracking from '~/blob/blob_links_tracking';
@@ -13,6 +17,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import blameDataQuery from '~/vue_shared/components/source_viewer/queries/blame_data.query.graphql';
import Blame from '~/vue_shared/components/source_viewer/components/blame_info.vue';
import * as utils from '~/vue_shared/components/source_viewer/utils';
+import CodeownersValidation from 'ee_component/blob/components/codeowners_validation.vue';
import {
BLOB_DATA_MOCK,
@@ -43,16 +48,17 @@ describe('Source Viewer component', () => {
const blameInfo =
BLAME_DATA_QUERY_RESPONSE_MOCK.data.project.repository.blobs.nodes[0].blame.groups;
- const createComponent = ({ showBlame = true } = {}) => {
+ const createComponent = ({ showBlame = true, blob = {} } = {}) => {
fakeApollo = createMockApollo([[blameDataQuery, blameDataQueryHandlerSuccess]]);
wrapper = shallowMountExtended(SourceViewer, {
apolloProvider: fakeApollo,
mocks: { $route: { hash } },
propsData: {
- blob: BLOB_DATA_MOCK,
+ blob: { ...blob, ...BLOB_DATA_MOCK },
chunks: CHUNKS_MOCK,
projectPath: 'test',
+ currentRef: 'main',
showBlame,
},
});
@@ -156,4 +162,20 @@ describe('Source Viewer component', () => {
expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(hash);
});
});
+
+ describe('Codeowners validation', () => {
+ const findCodeownersValidation = () => wrapper.findComponent(CodeownersValidation);
+
+ it('does not render codeowners validation when file is not CODEOWNERS', async () => {
+ await createComponent();
+ await nextTick();
+ expect(findCodeownersValidation().exists()).toBe(false);
+ });
+
+ it('renders codeowners validation when file is CODEOWNERS', async () => {
+ await createComponent({ blob: { name: CODEOWNERS_FILE_NAME } });
+ await nextTick();
+ expect(findCodeownersValidation().exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb b/spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb
deleted file mode 100644
index eca75d93c80..00000000000
--- a/spec/lib/gitlab/instrumentation/redis_client_middleware_spec.rb
+++ /dev/null
@@ -1,224 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require 'rspec-parameterized'
-require 'support/helpers/rails_helpers'
-
-RSpec.describe Gitlab::Instrumentation::RedisClientMiddleware, :request_store, feature_category: :scalability do
- using RSpec::Parameterized::TableSyntax
- include RedisHelpers
-
- let_it_be(:redis_store_class) { define_helper_redis_store_class }
- let_it_be(:redis_client) { RedisClient.new(redis_store_class.redis_client_params) }
-
- before do
- redis_client.call("flushdb")
- end
-
- describe 'read and write' do
- where(:setup, :command, :expect_write, :expect_read) do
- # The response is 'OK', the request size is the combined size of array
- # elements. Exercise counting of a status reply.
- [] | [:set, 'foo', 'bar'] | (3 + 3 + 3) | 2
-
- # The response is 1001, so 4 bytes. Exercise counting an integer reply.
- [[:set, 'foobar', 1000]] | [:incr, 'foobar'] | (4 + 6) | 4
-
- # Exercise counting empty multi bulk reply. Returns an empty hash `{}`
- [] | [:hgetall, 'foobar'] | (7 + 6) | 2
-
- # Hgetall response length is combined length of keys and values in the
- # hash. Exercises counting of a multi bulk reply
- # Returns `{"field"=>"hello world"}`, 5 for field, 11 for hello world, 8 for {, }, 4 "s, =, >
- [[:hset, 'myhash', 'field', 'hello world']] | [:hgetall, 'myhash'] | (7 + 6) | (5 + 11 + 8)
-
- # Exercise counting of a bulk reply
- [[:set, 'foo', 'bar' * 100]] | [:get, 'foo'] | (3 + 3) | (3 * 100)
-
- # Nested array response: [['foo', 0.0], ['bar', 1.0]]. Returns scores as float.
- [[:zadd, 'myset', 0, 'foo'],
- [:zadd, 'myset', 1, 'bar']] | [:zrange, 'myset', 0, -1, 'withscores'] | (6 + 5 + 1 + 2 + 10) | (3 + 3 + 3 + 3)
- end
-
- with_them do
- it 'counts bytes read and written' do
- setup.each { |cmd| redis_client.call(*cmd) }
- RequestStore.clear!
- redis_client.call(*command)
-
- expect(Gitlab::Instrumentation::Redis.read_bytes).to eq(expect_read)
- expect(Gitlab::Instrumentation::Redis.write_bytes).to eq(expect_write)
- end
- end
- end
-
- describe 'counting' do
- let(:instrumentation_class) { redis_store_class.instrumentation_class }
-
- it 'counts successful requests' do
- expect(instrumentation_class).to receive(:instance_count_request).with(1).and_call_original
-
- redis_client.call(:get, 'foobar')
- end
-
- it 'counts successful pipelined requests' do
- expect(instrumentation_class).to receive(:instance_count_request).with(2).and_call_original
- expect(instrumentation_class).to receive(:instance_count_pipelined_request).with(2).and_call_original
-
- redis_client.pipelined do |pipeline|
- pipeline.call(:get, '{foobar}buz')
- pipeline.call(:get, '{foobar}baz')
- end
- end
-
- context 'when encountering exceptions' do
- before do
- allow(redis_client.instance_variable_get(:@raw_connection)).to receive(:call).and_raise(
- RedisClient::ConnectionError, 'Connection was closed or lost')
- end
-
- it 'counts exception' do
- expect(instrumentation_class).to receive(:instance_count_exception)
- .with(instance_of(RedisClient::ConnectionError)).and_call_original
- expect(instrumentation_class).to receive(:log_exception)
- .with(instance_of(RedisClient::ConnectionError)).and_call_original
- expect(instrumentation_class).to receive(:instance_count_request).and_call_original
-
- expect do
- redis_client.call(:auth, 'foo', 'bar')
- end.to raise_error(RedisClient::Error)
- end
- end
-
- context 'in production environment' do
- before do
- stub_rails_env('production') # to avoid raising CrossSlotError
- end
-
- it 'counts disallowed cross-slot requests' do
- expect(instrumentation_class).to receive(:increment_cross_slot_request_count).and_call_original
- expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
-
- redis_client.call(:mget, 'foo', 'bar')
- end
-
- it 'does not count allowed cross-slot requests' do
- expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
- expect(instrumentation_class).to receive(:increment_allowed_cross_slot_request_count).and_call_original
-
- Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
- redis_client.call(:mget, 'foo', 'bar')
- end
- end
-
- it 'does not count allowed non-cross-slot requests' do
- expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
- expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
-
- Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
- redis_client.call(:mget, 'bar')
- end
- end
-
- it 'skips count for non-cross-slot requests' do
- expect(instrumentation_class).not_to receive(:increment_cross_slot_request_count).and_call_original
- expect(instrumentation_class).not_to receive(:increment_allowed_cross_slot_request_count).and_call_original
-
- redis_client.call(:mget, '{foo}bar', '{foo}baz')
- end
- end
-
- context 'without active RequestStore' do
- before do
- ::RequestStore.end!
- end
-
- it 'still runs cross-slot validation' do
- expect do
- redis_client.call('mget', 'foo', 'bar')
- end.to raise_error(instance_of(Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError))
- end
- end
- end
-
- describe 'latency' do
- let(:instrumentation_class) { redis_store_class.instrumentation_class }
-
- describe 'commands in the apdex' do
- where(:command) do
- [
- [[:get, 'foobar']],
- [%w[GET foobar]]
- ]
- end
-
- with_them do
- it 'measures requests we want in the apdex' do
- expect(instrumentation_class).to receive(:instance_observe_duration).with(a_value > 0)
- .and_call_original
-
- redis_client.call(*command)
- end
- end
-
- context 'with pipelined commands' do
- it 'measures requests that do not have blocking commands' do
- expect(instrumentation_class).to receive(:instance_observe_duration).twice.with(a_value > 0)
- .and_call_original
-
- redis_client.pipelined do |pipeline|
- pipeline.call(:get, '{foobar}buz')
- pipeline.call(:get, '{foobar}baz')
- end
- end
-
- it 'raises error when keys are not from the same slot' do
- expect do
- redis_client.pipelined do |pipeline|
- pipeline.call(:get, 'foo')
- pipeline.call(:get, 'bar')
- end
- end.to raise_error(instance_of(Gitlab::Instrumentation::RedisClusterValidator::CrossSlotError))
- end
- end
- end
-
- describe 'commands not in the apdex' do
- where(:setup, :command) do
- [['rpush', 'foobar', 1]] | ['brpop', 'foobar', 0]
- [['rpush', 'foobar', 1]] | ['blpop', 'foobar', 0]
- [['rpush', '{abc}foobar', 1]] | ['brpoplpush', '{abc}foobar', '{abc}bazqux', 0]
- [['rpush', '{abc}foobar', 1]] | ['brpoplpush', '{abc}foobar', '{abc}bazqux', 0]
- [['zadd', 'foobar', 1, 'a']] | ['bzpopmin', 'foobar', 0]
- [['zadd', 'foobar', 1, 'a']] | ['bzpopmax', 'foobar', 0]
- [['xadd', 'mystream', 1, 'myfield', 'mydata']] | ['xread', 'block', 1, 'streams', 'mystream', '0-0']
- [['xadd', 'foobar', 1, 'myfield', 'mydata'],
- ['xgroup', 'create', 'foobar', 'mygroup',
- 0]] | ['xreadgroup', 'group', 'mygroup', 'myconsumer', 'block', 1, 'streams', 'foobar', '0-0']
- [] | ['command']
- end
-
- with_them do
- it 'skips requests we do not want in the apdex' do
- setup.each { |cmd| redis_client.call(*cmd) }
-
- expect(instrumentation_class).not_to receive(:instance_observe_duration)
-
- redis_client.call(*command)
- end
- end
-
- context 'with pipelined commands' do
- it 'skips requests that have blocking commands' do
- expect(instrumentation_class).not_to receive(:instance_observe_duration)
-
- redis_client.pipelined do |pipeline|
- pipeline.call(:get, '{foobar}buz')
- pipeline.call(:rpush, '{foobar}baz', 1)
- pipeline.call(:brpop, '{foobar}baz', 0)
- end
- end
- end
- end
- end
-end
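The deleted spec above exercised a redis-client middleware. For orientation, a sketch of the general middleware shape from the redis-client gem; ByteCountingMiddleware and its body are placeholders, not GitLab's instrumentation:

require 'redis-client'

module ByteCountingMiddleware
  def call(command, redis_config)
    # command is an array such as [:get, "foo"]; measure or log it here
    super
  end

  def call_pipelined(commands, redis_config)
    super
  end
end

RedisClient.register(ByteCountingMiddleware)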
diff --git a/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb b/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb
index 1c62f5679d0..68dd784fb7e 100644
--- a/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb
+++ b/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ
before do
allow(Gitlab::Metrics::System).to receive(:monotonic_time)
- .and_return(0, 1, shutdown_timeout_seconds, 0, 1, Sidekiq.default_configuration[:timeout] + 2)
+ .and_return(0, 1, shutdown_timeout_seconds, 0, 1, Sidekiq[:timeout] + 2)
allow(Process).to receive(:kill)
allow(::Sidekiq).to receive(:logger).and_return(logger)
allow(logger).to receive(:warn)
@@ -81,7 +81,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ
let(:signal_params) do
[
[:TSTP, pid, 'stop fetching new jobs', shutdown_timeout_seconds],
- [:TERM, pid, 'gracefully shut down', Sidekiq.default_configuration[:timeout] + 2]
+ [:TERM, pid, 'gracefully shut down', Sidekiq[:timeout] + 2]
]
end
@@ -95,7 +95,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_categ
let(:signal_params) do
[
[:TSTP, pid, 'stop fetching new jobs', shutdown_timeout_seconds],
- [:TERM, pid, 'gracefully shut down', Sidekiq.default_configuration[:timeout] + 2],
+ [:TERM, pid, 'gracefully shut down', Sidekiq[:timeout] + 2],
[:KILL, kill_pid, 'hard shut down', nil]
]
end
diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb
index 1900571273e..05bcdf2fc96 100644
--- a/spec/lib/gitlab/runtime_spec.rb
+++ b/spec/lib/gitlab/runtime_spec.rb
@@ -127,7 +127,7 @@ RSpec.describe Gitlab::Runtime, feature_category: :cloud_connector do
before do
stub_const('::Sidekiq', sidekiq_type)
allow(sidekiq_type).to receive(:server?).and_return(true)
- allow(sidekiq_type).to receive(:default_configuration).and_return({ concurrency: 2 })
+ allow(sidekiq_type).to receive(:[]).with(:concurrency).and_return(2)
end
it_behaves_like "valid runtime", :sidekiq, 5
diff --git a/spec/lib/gitlab/sidekiq_config_spec.rb b/spec/lib/gitlab/sidekiq_config_spec.rb
index dff7c2d4ae6..00b1666106f 100644
--- a/spec/lib/gitlab/sidekiq_config_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config_spec.rb
@@ -178,8 +178,7 @@ RSpec.describe Gitlab::SidekiqConfig do
allow(::Gitlab::SidekiqConfig::WorkerRouter)
.to receive(:global).and_return(::Gitlab::SidekiqConfig::WorkerRouter.new(test_routes))
- allow(Sidekiq).to receive_message_chain(:default_configuration, :queues)
- .and_return(%w[default background_migration])
+ allow(Sidekiq).to receive(:[]).with(:queues).and_return(%w[default background_migration])
mappings = described_class.current_worker_queue_mappings
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index b1a8a9f4da3..2e07fa100e8 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -492,7 +492,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
'completed_at' => current_utc_time.to_i }
end
- subject { described_class.new(Sidekiq.logger) }
+ subject { described_class.new }
it 'update payload correctly' do
travel_to(current_utc_time) do
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index e1662903fa4..9cf9901007c 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics, feature_category: :shar
describe '.initialize_process_metrics' do
it 'sets concurrency metrics' do
- expect(concurrency_metric).to receive(:set).with({}, Sidekiq.default_configuration[:concurrency].to_i)
+ expect(concurrency_metric).to receive(:set).with({}, Sidekiq[:concurrency].to_i)
described_class.initialize_process_metrics
end
@@ -122,7 +122,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics, feature_category: :shar
end
it 'sets the concurrency metric' do
- expect(concurrency_metric).to receive(:set).with({}, Sidekiq.default_configuration[:concurrency].to_i)
+ expect(concurrency_metric).to receive(:set).with({}, Sidekiq[:concurrency].to_i)
described_class.initialize_process_metrics
end
diff --git a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
index 96d4042b1e6..bf379d9cb0d 100644
--- a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
+++ b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues,
let(:migrator) { described_class.new(mappings) }
let(:set_after) do
- Sidekiq.redis { |c| c.call("ZRANGE", set_name, 0, -1, "WITHSCORES") }
+ Sidekiq.redis { |c| c.zrange(set_name, 0, -1, with_scores: true) }
.map { |item, score| [Gitlab::Json.load(item), score] }
end
@@ -226,9 +226,8 @@ RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues,
let(:logger) { nil }
def list_queues
- queues = []
- Sidekiq.redis do |conn|
- conn.scan("MATCH", "queue:*") { |key| queues << key }
+ queues = Sidekiq.redis do |conn|
+ conn.scan_each(match: "queue:*").to_a
end
queues.uniq.map { |queue| queue.split(':', 2).last }
end
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index 47c89a0bbbe..a570a66ffda 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -149,7 +149,7 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues,
context 'when both multi-store feature flags are off' do
def with_redis(&block)
- Gitlab::Redis::Queues.with(&block)
+ Sidekiq.redis(&block)
end
before do
diff --git a/spec/support/helpers/dns_helpers.rb b/spec/support/helpers/dns_helpers.rb
index e673e36adbd..be26c80d217 100644
--- a/spec/support/helpers/dns_helpers.rb
+++ b/spec/support/helpers/dns_helpers.rb
@@ -6,7 +6,6 @@ module DnsHelpers
stub_invalid_dns!
permit_local_dns!
permit_postgresql!
- permit_redis!
end
def permit_dns!
@@ -54,18 +53,6 @@ module DnsHelpers
ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).map(&:host).compact.uniq
end
- def permit_redis!
- # https://github.com/redis-rb/redis-client/blob/v0.11.2/lib/redis_client/ruby_connection.rb#L51 uses Socket.tcp that
- # calls Addrinfo.getaddrinfo internally.
- hosts = Gitlab::Redis::ALL_CLASSES.map do |redis_instance|
- redis_instance.redis_client_params[:host]
- end.uniq.compact
-
- hosts.each do |host|
- allow(Addrinfo).to receive(:getaddrinfo).with(host, anything, nil, :STREAM, anything, anything, any_args).and_call_original
- end
- end
-
def stub_resolver(stubbed_lookups = {})
resolver = instance_double('Resolv::DNS')
allow(resolver).to receive(:timeouts=)
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
index 060976eba2d..69c20a00c5a 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
@@ -93,6 +93,8 @@ RSpec.shared_context 'structured_logger' do
end
before do
+ allow(Sidekiq).to receive(:logger).and_return(logger)
+
allow(subject).to receive(:current_time).and_return(timestamp.to_f)
allow(Process).to receive(:clock_gettime).with(Process::CLOCK_REALTIME, :float_second)
@@ -101,7 +103,7 @@ RSpec.shared_context 'structured_logger' do
.and_return(clock_thread_cputime_start, clock_thread_cputime_end)
end
- subject { described_class.new(logger) }
+ subject { described_class.new }
def call_subject(job, queue)
# This structured logger strongly depends on execution of `InstrumentationLogger`
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
index d541dee438e..85ee3ed4183 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
@@ -55,7 +55,6 @@ RSpec.shared_context 'server metrics with mocked prometheus' do
allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_mem_total_bytes, anything, {}, :all).and_return(sidekiq_mem_total_bytes)
allow(concurrency_metric).to receive(:set)
- allow(completion_seconds_metric).to receive(:get)
end
end
diff --git a/spec/support/shared_examples/redis/redis_shared_examples.rb b/spec/support/shared_examples/redis/redis_shared_examples.rb
index 5b393744892..f184f678283 100644
--- a/spec/support/shared_examples/redis/redis_shared_examples.rb
+++ b/spec/support/shared_examples/redis/redis_shared_examples.rb
@@ -86,67 +86,6 @@ RSpec.shared_examples "redis_shared_examples" do
end
end
- describe '.redis_client_params' do
- # .redis_client_params wraps over `.redis_store_options` by modifying its outputs
- # to be compatible with `RedisClient`. We test for compatibility in this block while
- # the contents of redis_store_options are tested in the `.params` block.
-
- subject { described_class.new(rails_env).redis_client_params }
-
- let(:rails_env) { 'development' }
- let(:config_file_name) { config_old_format_socket }
-
- shared_examples 'instrumentation_class in custom key' do
- it 'moves instrumentation class into custom' do
- expect(subject[:custom][:instrumentation_class]).to eq(described_class.instrumentation_class)
- expect(subject[:instrumentation_class]).to be_nil
- end
- end
-
- context 'when url is host based' do
- context 'with old format' do
- let(:config_file_name) { config_old_format_host }
-
- it 'does not raise ArgumentError for invalid keywords' do
- expect { RedisClient.config(**subject) }.not_to raise_error
- end
-
- it_behaves_like 'instrumentation_class in custom key'
- end
-
- context 'with new format' do
- let(:config_file_name) { config_new_format_host }
-
- where(:rails_env, :host) do
- [
- %w[development development-host],
- %w[test test-host],
- %w[production production-host]
- ]
- end
-
- with_them do
- it 'does not raise ArgumentError for invalid keywords in SentinelConfig' do
- expect(subject[:name]).to eq(host)
- expect { RedisClient.sentinel(**subject) }.not_to raise_error
- end
-
- it_behaves_like 'instrumentation_class in custom key'
- end
- end
- end
-
- context 'when url contains unix socket reference' do
- let(:config_file_name) { config_old_format_socket }
-
- it 'does not raise ArgumentError for invalid keywords' do
- expect { RedisClient.config(**subject) }.not_to raise_error
- end
-
- it_behaves_like 'instrumentation_class in custom key'
- end
- end
-
describe '.params' do
subject { described_class.new(rails_env).params }
diff --git a/spec/support/sidekiq.rb b/spec/support/sidekiq.rb
index 6c354c780b2..b25f39c5e74 100644
--- a/spec/support/sidekiq.rb
+++ b/spec/support/sidekiq.rb
@@ -1,19 +1,13 @@
# frozen_string_literal: true
RSpec.configure do |config|
- def gitlab_sidekiq_inline
+ def gitlab_sidekiq_inline(&block)
# We need to cleanup the queues before running jobs in specs because the
# middleware might have written to redis
redis_queues_cleanup!
redis_queues_metadata_cleanup!
-
- # Scoped inline! is thread-safe which breaks capybara specs
- # see https://github.com/sidekiq/sidekiq/issues/6069
- Sidekiq::Testing.inline!
-
- yield
+ Sidekiq::Testing.inline!(&block)
ensure
- Sidekiq::Testing.fake! # fake is the default so we reset it to that
redis_queues_cleanup!
redis_queues_metadata_cleanup!
end
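Sidekiq::Testing.inline!, used above in block form, runs enqueued jobs synchronously inside the block and restores the previous testing mode when the block exits (Sidekiq 6.x behaviour). A small usage sketch; MyWorker is hypothetical:

require 'sidekiq/testing'

Sidekiq::Testing.inline! do
  MyWorker.perform_async(42) # executes immediately instead of being queued
end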
diff --git a/spec/support/sidekiq_middleware.rb b/spec/support/sidekiq_middleware.rb
index cbd6163d46b..f4d90ff5151 100644
--- a/spec/support/sidekiq_middleware.rb
+++ b/spec/support/sidekiq_middleware.rb
@@ -6,6 +6,15 @@ require 'sidekiq/testing'
module SidekiqMiddleware
def with_sidekiq_server_middleware(&block)
Sidekiq::Testing.server_middleware.clear
+
+ if Gem::Version.new(Sidekiq::VERSION) != Gem::Version.new('6.5.12')
+ raise 'New version of sidekiq detected, please remove this line'
+ end
+
+ # This line is a workaround for a Sidekiq bug that is already fixed in v7.0.0
+ # https://github.com/mperham/sidekiq/commit/1b83a152786ed382f07fff12d2608534f1e3c922
+ Sidekiq::Testing.server_middleware.instance_variable_set(:@config, Sidekiq)
+
Sidekiq::Testing.server_middleware(&block)
ensure
Sidekiq::Testing.server_middleware.clear
diff --git a/vendor/gems/sidekiq-reliable-fetch/Gemfile b/vendor/gems/sidekiq-reliable-fetch/Gemfile
index 8f86b2fe0b6..3bed294f56f 100644
--- a/vendor/gems/sidekiq-reliable-fetch/Gemfile
+++ b/vendor/gems/sidekiq-reliable-fetch/Gemfile
@@ -11,5 +11,4 @@ group :test do
gem "pry"
gem 'simplecov', require: false
gem 'stub_env', '~> 1.0'
- gem 'redis', '~> 4.8'
end
diff --git a/vendor/gems/sidekiq-reliable-fetch/Gemfile.lock b/vendor/gems/sidekiq-reliable-fetch/Gemfile.lock
index 484370fdfcc..aeb163db018 100644
--- a/vendor/gems/sidekiq-reliable-fetch/Gemfile.lock
+++ b/vendor/gems/sidekiq-reliable-fetch/Gemfile.lock
@@ -1,51 +1,46 @@
PATH
remote: .
specs:
- gitlab-sidekiq-fetcher (0.11.0)
+ gitlab-sidekiq-fetcher (0.10.0)
json (>= 2.5)
- sidekiq (~> 7.0)
+ sidekiq (~> 6.1)
GEM
remote: https://rubygems.org/
specs:
- coderay (1.1.3)
- concurrent-ruby (1.2.2)
- connection_pool (2.4.1)
- diff-lcs (1.5.0)
- docile (1.4.0)
- json (2.6.3)
- method_source (1.0.0)
- pry (0.14.2)
- coderay (~> 1.1)
- method_source (~> 1.0)
- rack (3.0.8)
+ coderay (1.1.2)
+ connection_pool (2.4.0)
+ diff-lcs (1.3)
+ docile (1.3.1)
+ json (2.5.1)
+ method_source (0.9.0)
+ pry (0.11.3)
+ coderay (~> 1.1.0)
+ method_source (~> 0.9.0)
+ rack (2.2.6.4)
redis (4.8.1)
- redis-client (0.18.0)
- connection_pool
- rspec (3.12.0)
- rspec-core (~> 3.12.0)
- rspec-expectations (~> 3.12.0)
- rspec-mocks (~> 3.12.0)
- rspec-core (3.12.2)
- rspec-support (~> 3.12.0)
- rspec-expectations (3.12.3)
+ rspec (3.8.0)
+ rspec-core (~> 3.8.0)
+ rspec-expectations (~> 3.8.0)
+ rspec-mocks (~> 3.8.0)
+ rspec-core (3.8.0)
+ rspec-support (~> 3.8.0)
+ rspec-expectations (3.8.1)
diff-lcs (>= 1.2.0, < 2.0)
- rspec-support (~> 3.12.0)
- rspec-mocks (3.12.6)
+ rspec-support (~> 3.8.0)
+ rspec-mocks (3.8.0)
diff-lcs (>= 1.2.0, < 2.0)
- rspec-support (~> 3.12.0)
- rspec-support (3.12.1)
- sidekiq (7.2.0)
- concurrent-ruby (< 2)
- connection_pool (>= 2.3.0)
- rack (>= 2.2.4)
- redis-client (>= 0.14.0)
- simplecov (0.22.0)
+ rspec-support (~> 3.8.0)
+ rspec-support (3.8.0)
+ sidekiq (6.5.8)
+ connection_pool (>= 2.2.5, < 3)
+ rack (~> 2.0)
+ redis (>= 4.5.0, < 5)
+ simplecov (0.16.1)
docile (~> 1.1)
- simplecov-html (~> 0.11)
- simplecov_json_formatter (~> 0.1)
- simplecov-html (0.12.3)
- simplecov_json_formatter (0.1.4)
+ json (>= 1.8, < 3)
+ simplecov-html (~> 0.10.0)
+ simplecov-html (0.10.2)
stub_env (1.0.4)
rspec (>= 2.0, < 4.0)
@@ -55,7 +50,6 @@ PLATFORMS
DEPENDENCIES
gitlab-sidekiq-fetcher!
pry
- redis (~> 4.8)
rspec (~> 3)
simplecov
stub_env (~> 1.0)
diff --git a/vendor/gems/sidekiq-reliable-fetch/README.md b/vendor/gems/sidekiq-reliable-fetch/README.md
index 5e218a76cd5..4c7029e3955 100644
--- a/vendor/gems/sidekiq-reliable-fetch/README.md
+++ b/vendor/gems/sidekiq-reliable-fetch/README.md
@@ -6,7 +6,7 @@ fetches from Redis.
It's based on https://github.com/TEA-ebook/sidekiq-reliable-fetch.
-**IMPORTANT NOTE:** Since version `0.11.0` this gem works only with `sidekiq >= 7` (which introduced Fetch API breaking changes). Please use version `~> 0.10` if you use older version of the `sidekiq` .
+**IMPORTANT NOTE:** Since version `0.7.0` this gem works only with `sidekiq >= 6.1` (which introduced Fetch API breaking changes). Please use version `~> 0.5` if you use an older version of `sidekiq`.
**UPGRADE NOTE:** If upgrading from 0.7.0, strongly consider a full deployed step on 0.7.1 before 0.8.0; that fixes a bug in the queue name validation that will hit if sidekiq nodes running 0.7.0 see working queues named by 0.8.0. See https://gitlab.com/gitlab-org/sidekiq-reliable-fetch/-/merge_requests/22
diff --git a/vendor/gems/sidekiq-reliable-fetch/gitlab-sidekiq-fetcher.gemspec b/vendor/gems/sidekiq-reliable-fetch/gitlab-sidekiq-fetcher.gemspec
index df89abca4ac..b656267003a 100644
--- a/vendor/gems/sidekiq-reliable-fetch/gitlab-sidekiq-fetcher.gemspec
+++ b/vendor/gems/sidekiq-reliable-fetch/gitlab-sidekiq-fetcher.gemspec
@@ -1,6 +1,6 @@
Gem::Specification.new do |s|
s.name = 'gitlab-sidekiq-fetcher'
- s.version = '0.11.0'
+ s.version = '0.10.0'
s.authors = ['TEA', 'GitLab']
s.email = 'valery@gitlab.com'
s.license = 'LGPL-3.0'
@@ -10,6 +10,6 @@ Gem::Specification.new do |s|
s.require_paths = ['lib']
s.files = Dir.glob('lib/**/*.*')
s.test_files = Dir.glob('{spec,tests}/**/*.*')
- s.add_dependency 'sidekiq', '~> 7.0'
+ s.add_dependency 'sidekiq', '~> 6.1'
s.add_runtime_dependency 'json', '>= 2.5'
end
diff --git a/vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/base_reliable_fetch.rb b/vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/base_reliable_fetch.rb
index 7ae9bcf63e4..006aad87abe 100644
--- a/vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/base_reliable_fetch.rb
+++ b/vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/base_reliable_fetch.rb
@@ -53,7 +53,7 @@ module Sidekiq
Sidekiq::ReliableFetch
end
- config[:fetch_class] = fetch_strategy
+ config[:fetch] = fetch_strategy.new(config)
Sidekiq.logger.info('GitLab reliable fetch activated!')
@@ -115,18 +115,18 @@ module Sidekiq
attr_reader :cleanup_interval, :last_try_to_take_lease_at, :lease_interval,
:queues, :use_semi_reliable_fetch,
- :strictly_ordered_queues, :config
+ :strictly_ordered_queues
- def initialize(capsule)
- raise ArgumentError, 'missing queue list' unless capsule.config.queues
+ def initialize(options)
+ raise ArgumentError, 'missing queue list' unless options[:queues]
- @config = capsule.config
+ @config = options
@interrupted_set = Sidekiq::InterruptedSet.new
- @cleanup_interval = config.fetch(:cleanup_interval, DEFAULT_CLEANUP_INTERVAL)
- @lease_interval = config.fetch(:lease_interval, DEFAULT_LEASE_INTERVAL)
+ @cleanup_interval = options.fetch(:cleanup_interval, DEFAULT_CLEANUP_INTERVAL)
+ @lease_interval = options.fetch(:lease_interval, DEFAULT_LEASE_INTERVAL)
@last_try_to_take_lease_at = 0
- @strictly_ordered_queues = !!config[:strict]
- @queues = config.queues.map { |q| "queue:#{q}" }
+ @strictly_ordered_queues = !!options[:strict]
+ @queues = options[:queues].map { |q| "queue:#{q}" }
end
def retrieve_work
@@ -140,7 +140,7 @@ module Sidekiq
"#{self.class} does not implement #{__method__}"
end
- def bulk_requeue(inprogress)
+ def bulk_requeue(inprogress, _options)
return if inprogress.empty?
Sidekiq.redis do |conn|
@@ -202,7 +202,7 @@ module Sidekiq
Sidekiq.logger.info('Cleaning working queues')
Sidekiq.redis do |conn|
- conn.scan(match: "#{WORKING_QUEUE_PREFIX}:queue:*", count: SCAN_COUNT) do |key|
+ conn.scan_each(match: "#{WORKING_QUEUE_PREFIX}:queue:*", count: SCAN_COUNT) do |key|
original_queue, identity = extract_queue_and_identity(key)
next if original_queue.nil? || identity.nil?
@@ -234,7 +234,7 @@ module Sidekiq
rescue NameError
end
- max_retries_after_interruption ||= config[:max_retries_after_interruption]
+ max_retries_after_interruption ||= @config[:max_retries_after_interruption]
max_retries_after_interruption ||= DEFAULT_MAX_RETRIES_AFTER_INTERRUPTION
max_retries_after_interruption
end
@@ -263,7 +263,7 @@ module Sidekiq
@last_try_to_take_lease_at = Time.now.to_f
Sidekiq.redis do |conn|
- conn.set(LEASE_KEY, 1, 'nx', 'ex', cleanup_interval)
+ conn.set(LEASE_KEY, 1, nx: true, ex: cleanup_interval)
end
end
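The lease-taking line above now uses redis-rb keyword options. A minimal sketch of the same SET NX EX pattern, with the client and key name as assumptions:

require 'redis'

redis = Redis.new
# Returns true only if the key did not already exist; it expires after 60 seconds.
acquired = redis.set('reliable-fetch:lease', 1, nx: true, ex: 60)
puts 'lease acquired, safe to clean working queues' if acquired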
diff --git a/vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/interrupted_set.rb b/vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/interrupted_set.rb
index 799e744957e..2fc7a10f9d0 100644
--- a/vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/interrupted_set.rb
+++ b/vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/interrupted_set.rb
@@ -45,7 +45,7 @@ module Sidekiq
end
def self.options
- Sidekiq.default_configuration
+ Sidekiq.respond_to?(:[]) ? Sidekiq : Sidekiq.options
end
end
end
diff --git a/vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/semi_reliable_fetch.rb b/vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/semi_reliable_fetch.rb
index 7beb83fea12..91b41501374 100644
--- a/vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/semi_reliable_fetch.rb
+++ b/vendor/gems/sidekiq-reliable-fetch/lib/sidekiq/semi_reliable_fetch.rb
@@ -7,7 +7,7 @@ module Sidekiq
# for semi-reliable fetch.
DEFAULT_SEMI_RELIABLE_FETCH_TIMEOUT = 2 # seconds
- def initialize(capsule)
+ def initialize(options)
super
@queues = @queues.uniq
@@ -16,7 +16,7 @@ module Sidekiq
private
def retrieve_unit_of_work
- work = brpop_with_sidekiq
+ work = Sidekiq.redis { |conn| conn.brpop(*queues_cmd, timeout: semi_reliable_fetch_timeout) }
return unless work
queue, job = work
@@ -29,17 +29,6 @@ module Sidekiq
unit_of_work
end
- def brpop_with_sidekiq
- Sidekiq.redis do |conn|
- conn.blocking_call(
- conn.read_timeout + semi_reliable_fetch_timeout,
- "brpop",
- *queues_cmd,
- semi_reliable_fetch_timeout
- )
- end
- end
-
def queues_cmd
if strictly_ordered_queues
@queues
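retrieve_unit_of_work above falls back to redis-rb's brpop, which blocks across several queues and returns [queue_key, payload] or nil on timeout. A usage sketch with illustrative queue names:

work = Sidekiq.redis { |conn| conn.brpop('queue:first', 'queue:second', timeout: 2) }
if work
  queue, payload = work
  # hand the payload to the processor here
end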
diff --git a/vendor/gems/sidekiq-reliable-fetch/spec/base_reliable_fetch_spec.rb b/vendor/gems/sidekiq-reliable-fetch/spec/base_reliable_fetch_spec.rb
index 27fb86d2a8e..32e62925aaf 100644
--- a/vendor/gems/sidekiq-reliable-fetch/spec/base_reliable_fetch_spec.rb
+++ b/vendor/gems/sidekiq-reliable-fetch/spec/base_reliable_fetch_spec.rb
@@ -3,20 +3,14 @@ require 'fetch_shared_examples'
require 'sidekiq/base_reliable_fetch'
require 'sidekiq/reliable_fetch'
require 'sidekiq/semi_reliable_fetch'
-require 'sidekiq/capsule'
describe Sidekiq::BaseReliableFetch do
let(:job) { Sidekiq.dump_json(class: 'Bob', args: [1, 2, 'foo']) }
- let(:queues) { ['foo'] }
- let(:options) { { queues: queues } }
- let(:config) { Sidekiq::Config.new(options) }
- let(:capsule) { Sidekiq::Capsule.new("default", config) }
- let(:fetcher) { Sidekiq::ReliableFetch.new(capsule) }
before { Sidekiq.redis(&:flushdb) }
describe 'UnitOfWork' do
- before { config.queues = queues }
+ let(:fetcher) { Sidekiq::ReliableFetch.new(queues: ['foo']) }
describe '#requeue' do
it 'requeues job' do
@@ -46,16 +40,14 @@ describe Sidekiq::BaseReliableFetch do
end
describe '#bulk_requeue' do
- let(:queues) { %w[foo bar] }
+ let(:options) { { queues: %w[foo bar] } }
let!(:queue1) { Sidekiq::Queue.new('foo') }
let!(:queue2) { Sidekiq::Queue.new('bar') }
- before { config.queues = queues }
-
it 'requeues the bulk' do
uow = described_class::UnitOfWork
jobs = [ uow.new('queue:foo', job), uow.new('queue:foo', job), uow.new('queue:bar', job) ]
- described_class.new(capsule).bulk_requeue(jobs)
+ described_class.new(options).bulk_requeue(jobs, nil)
expect(queue1.size).to eq 2
expect(queue2.size).to eq 1
@@ -65,26 +57,24 @@ describe Sidekiq::BaseReliableFetch do
uow = described_class::UnitOfWork
interrupted_job = Sidekiq.dump_json(class: 'Bob', args: [1, 2, 'foo'], interrupted_count: 3)
jobs = [ uow.new('queue:foo', interrupted_job), uow.new('queue:foo', job), uow.new('queue:bar', job) ]
- described_class.new(capsule).bulk_requeue(jobs)
+ described_class.new(options).bulk_requeue(jobs, nil)
expect(queue1.size).to eq 1
expect(queue2.size).to eq 1
expect(Sidekiq::InterruptedSet.new.size).to eq 1
end
- context 'when max_retries_after_interruption is disabled' do
- let(:options) { { queues: queues, max_retries_after_interruption: -1 } }
+ it 'does not put jobs into interrupted queue if it is disabled' do
+ options[:max_retries_after_interruption] = -1
- it 'does not put jobs into interrupted queue' do
- uow = described_class::UnitOfWork
- interrupted_job = Sidekiq.dump_json(class: 'Bob', args: [1, 2, 'foo'], interrupted_count: 3)
- jobs = [ uow.new('queue:foo', interrupted_job), uow.new('queue:foo', job), uow.new('queue:bar', job) ]
- described_class.new(capsule).bulk_requeue(jobs)
+ uow = described_class::UnitOfWork
+ interrupted_job = Sidekiq.dump_json(class: 'Bob', args: [1, 2, 'foo'], interrupted_count: 3)
+ jobs = [ uow.new('queue:foo', interrupted_job), uow.new('queue:foo', job), uow.new('queue:bar', job) ]
+ described_class.new(options).bulk_requeue(jobs, nil)
- expect(queue1.size).to eq 2
- expect(queue2.size).to eq 1
- expect(Sidekiq::InterruptedSet.new.size).to eq 0
- end
+ expect(queue1.size).to eq 2
+ expect(queue2.size).to eq 1
+ expect(Sidekiq::InterruptedSet.new.size).to eq 0
end
it 'does not put jobs into interrupted queue if it is disabled on the worker' do
@@ -93,7 +83,7 @@ describe Sidekiq::BaseReliableFetch do
uow = described_class::UnitOfWork
interrupted_job = Sidekiq.dump_json(class: 'Bob', args: [1, 2, 'foo'], interrupted_count: 3)
jobs = [ uow.new('queue:foo', interrupted_job), uow.new('queue:foo', job), uow.new('queue:bar', job) ]
- described_class.new(capsule).bulk_requeue(jobs)
+ described_class.new(options).bulk_requeue(jobs, nil)
expect(queue1.size).to eq 2
expect(queue2.size).to eq 1
diff --git a/vendor/gems/sidekiq-reliable-fetch/spec/fetch_shared_examples.rb b/vendor/gems/sidekiq-reliable-fetch/spec/fetch_shared_examples.rb
index 11489a37b27..df7f715f2f9 100644
--- a/vendor/gems/sidekiq-reliable-fetch/spec/fetch_shared_examples.rb
+++ b/vendor/gems/sidekiq-reliable-fetch/spec/fetch_shared_examples.rb
@@ -1,70 +1,54 @@
shared_examples 'a Sidekiq fetcher' do
let(:queues) { ['assigned'] }
- let(:options) { { queues: queues } }
- let(:config) { Sidekiq::Config.new(options) }
- let(:capsule) { Sidekiq::Capsule.new("default", config) }
- before do
- config.queues = queues
- Sidekiq.redis(&:flushdb)
- end
+ before { Sidekiq.redis(&:flushdb) }
describe '#retrieve_work' do
let(:job) { Sidekiq.dump_json(class: 'Bob', args: [1, 2, 'foo']) }
- let(:fetcher) { described_class.new(capsule) }
+ let(:fetcher) { described_class.new(queues: queues) }
it 'does not clean up orphaned jobs more than once per cleanup interval' do
- Sidekiq::Client.via(Sidekiq::RedisConnection.create(url: REDIS_URL, size: 10)) do
- expect(fetcher).to receive(:clean_working_queues!).once
+ Sidekiq.redis = Sidekiq::RedisConnection.create(url: REDIS_URL, size: 10)
- threads = 10.times.map do
- Thread.new do
- fetcher.retrieve_work
- end
- end
+ expect(fetcher).to receive(:clean_working_queues!).once
- threads.map(&:join)
+ threads = 10.times.map do
+ Thread.new do
+ fetcher.retrieve_work
+ end
end
- end
- context 'when strictly order is enabled' do
- let(:queues) { ['first', 'second'] }
- let(:options) { { strict: true, queues: queues } }
+ threads.map(&:join)
+ end
- it 'retrieves by order' do
- fetcher = described_class.new(capsule)
+ it 'retrieves by order when strictly order is enabled' do
+ fetcher = described_class.new(strict: true, queues: ['first', 'second'])
- Sidekiq.redis do |conn|
- conn.rpush('queue:first', ['msg3', 'msg2', 'msg1'])
- conn.rpush('queue:second', 'msg4')
- end
+ Sidekiq.redis do |conn|
+ conn.rpush('queue:first', ['msg3', 'msg2', 'msg1'])
+ conn.rpush('queue:second', 'msg4')
+ end
- jobs = (1..4).map { fetcher.retrieve_work.job }
+ jobs = (1..4).map { fetcher.retrieve_work.job }
- expect(jobs).to eq ['msg1', 'msg2', 'msg3', 'msg4']
- end
+ expect(jobs).to eq ['msg1', 'msg2', 'msg3', 'msg4']
end
- context 'when queues are not strictly ordered' do
- let(:queues) { ['first', 'second'] }
+ it 'does not starve any queue when queues are not strictly ordered' do
+ fetcher = described_class.new(queues: ['first', 'second'])
- it 'does not starve any queue' do
- fetcher = described_class.new(capsule)
-
- Sidekiq.redis do |conn|
- conn.rpush('queue:first', (1..200).map { |i| "msg#{i}" })
- conn.rpush('queue:second', 'this_job_should_not_stuck')
- end
+ Sidekiq.redis do |conn|
+ conn.rpush('queue:first', (1..200).map { |i| "msg#{i}" })
+ conn.rpush('queue:second', 'this_job_should_not_stuck')
+ end
- jobs = (1..100).map { fetcher.retrieve_work.job }
+ jobs = (1..100).map { fetcher.retrieve_work.job }
- expect(jobs).to include 'this_job_should_not_stuck'
- end
+ expect(jobs).to include 'this_job_should_not_stuck'
end
shared_examples "basic queue handling" do |queue|
- let(:queues) { [queue] }
- let(:fetcher) { described_class.new(capsule) }
+ let(:fetcher) { described_class.new(queues: [queue]) }
it 'retrieves the job and puts it to working queue' do
Sidekiq.redis { |conn| conn.rpush("queue:#{queue}", job) }
@@ -166,8 +150,7 @@ shared_examples 'a Sidekiq fetcher' do
context 'with short cleanup interval' do
let(:short_interval) { 1 }
- let(:options) { { queues: queues, lease_interval: short_interval, cleanup_interval: short_interval } }
- let(:fetcher) { described_class.new(capsule) }
+ let(:fetcher) { described_class.new(queues: queues, lease_interval: short_interval, cleanup_interval: short_interval) }
it 'requeues when there is no heartbeat' do
Sidekiq.redis { |conn| conn.rpush('queue:assigned', job) }
diff --git a/vendor/gems/sidekiq-reliable-fetch/spec/reliable_fetch_spec.rb b/vendor/gems/sidekiq-reliable-fetch/spec/reliable_fetch_spec.rb
index b919d610aca..bdef04a021f 100644
--- a/vendor/gems/sidekiq-reliable-fetch/spec/reliable_fetch_spec.rb
+++ b/vendor/gems/sidekiq-reliable-fetch/spec/reliable_fetch_spec.rb
@@ -2,7 +2,6 @@ require 'spec_helper'
require 'fetch_shared_examples'
require 'sidekiq/base_reliable_fetch'
require 'sidekiq/reliable_fetch'
-require 'sidekiq/capsule'
describe Sidekiq::ReliableFetch do
include_examples 'a Sidekiq fetcher'
diff --git a/vendor/gems/sidekiq-reliable-fetch/spec/semi_reliable_fetch_spec.rb b/vendor/gems/sidekiq-reliable-fetch/spec/semi_reliable_fetch_spec.rb
index 8b167ae7ee5..60cd81ba913 100644
--- a/vendor/gems/sidekiq-reliable-fetch/spec/semi_reliable_fetch_spec.rb
+++ b/vendor/gems/sidekiq-reliable-fetch/spec/semi_reliable_fetch_spec.rb
@@ -2,9 +2,6 @@ require 'spec_helper'
require 'fetch_shared_examples'
require 'sidekiq/base_reliable_fetch'
require 'sidekiq/semi_reliable_fetch'
-require 'sidekiq/capsule'
-require 'sidekiq/config'
-require 'redis'
describe Sidekiq::SemiReliableFetch do
include_examples 'a Sidekiq fetcher'
@@ -12,11 +9,7 @@ describe Sidekiq::SemiReliableFetch do
describe '#retrieve_work' do
let(:queues) { ['stuff_to_do'] }
let(:options) { { queues: queues } }
- let(:config) { Sidekiq::Config.new(options) }
- let(:capsule) { Sidekiq::Capsule.new("default", config) }
- let(:fetcher) { described_class.new(capsule) }
-
- before { config.queues = queues }
+ let(:fetcher) { described_class.new(options) }
context 'timeout config' do
before do
@@ -27,9 +20,8 @@ describe Sidekiq::SemiReliableFetch do
let(:timeout) { nil }
it 'brpops with the default timeout timeout' do
- Sidekiq.redis do |conn|
- expect(conn).to receive(:blocking_call)
- .with(conn.read_timeout + 2, 'brpop', 'queue:stuff_to_do', 2).once.and_call_original
+ Sidekiq.redis do |connection|
+ expect(connection).to receive(:brpop).with("queue:stuff_to_do", { timeout: 2 }).once.and_call_original
fetcher.retrieve_work
end
@@ -40,9 +32,8 @@ describe Sidekiq::SemiReliableFetch do
let(:timeout) { '5' }
it 'brpops with the default timeout timeout' do
- Sidekiq.redis do |conn|
- expect(conn).to receive(:blocking_call)
- .with(conn.read_timeout + 5, 'brpop', 'queue:stuff_to_do', 5).once.and_call_original
+ Sidekiq.redis do |connection|
+ expect(connection).to receive(:brpop).with("queue:stuff_to_do", { timeout: 5 }).once.and_call_original
fetcher.retrieve_work
end
diff --git a/vendor/gems/sidekiq-reliable-fetch/spec/spec_helper.rb b/vendor/gems/sidekiq-reliable-fetch/spec/spec_helper.rb
index ab1c5317ff3..45418571579 100644
--- a/vendor/gems/sidekiq-reliable-fetch/spec/spec_helper.rb
+++ b/vendor/gems/sidekiq-reliable-fetch/spec/spec_helper.rb
@@ -9,7 +9,7 @@ SimpleCov.start
REDIS_URL = ENV['REDIS_URL'] || 'redis://localhost:6379/10'
Sidekiq.configure_client do |config|
- config.redis = { url: REDIS_URL, read_timeout: 5 }
+ config.redis = { url: REDIS_URL }
end
Sidekiq.logger.level = Logger::ERROR
diff --git a/vendor/gems/sidekiq-reliable-fetch/tests/interruption/config.rb b/vendor/gems/sidekiq-reliable-fetch/tests/interruption/config.rb
index a8f66a5f041..f69cca96d80 100644
--- a/vendor/gems/sidekiq-reliable-fetch/tests/interruption/config.rb
+++ b/vendor/gems/sidekiq-reliable-fetch/tests/interruption/config.rb
@@ -14,7 +14,6 @@ Sidekiq.configure_server do |config|
# These will be ignored for :basic
config[:cleanup_interval] = TEST_CLEANUP_INTERVAL
config[:lease_interval] = TEST_LEASE_INTERVAL
- config[:queues] = ['default']
Sidekiq::ReliableFetch.setup_reliable_fetch!(config)
end
diff --git a/vendor/gems/sidekiq-reliable-fetch/tests/reliability/config.rb b/vendor/gems/sidekiq-reliable-fetch/tests/reliability/config.rb
index c516112ccb7..05ffcfca9b5 100644
--- a/vendor/gems/sidekiq-reliable-fetch/tests/reliability/config.rb
+++ b/vendor/gems/sidekiq-reliable-fetch/tests/reliability/config.rb
@@ -23,7 +23,6 @@ Sidekiq.configure_server do |config|
# These will be ignored for :basic
config[:cleanup_interval] = TEST_CLEANUP_INTERVAL
config[:lease_interval] = TEST_LEASE_INTERVAL
- config[:queues] = ['default']
Sidekiq::ReliableFetch.setup_reliable_fetch!(config)
end
diff --git a/vite.config.js b/vite.config.js
index 68050615272..5d62aed15b9 100644
--- a/vite.config.js
+++ b/vite.config.js
@@ -90,8 +90,13 @@ export default defineConfig({
}),
],
define: {
- IS_EE: IS_EE ? 'window.gon && window.gon.ee' : JSON.stringify(false),
- IS_JH: IS_JH ? 'window.gon && window.gon.jh' : JSON.stringify(false),
+ // window can be undefined in a Web Worker
+ IS_EE: IS_EE
+ ? 'typeof window !== "undefined" && window.gon && window.gon.ee'
+ : JSON.stringify(false),
+ IS_JH: IS_JH
+ ? 'typeof window !== "undefined" && window.gon && window.gon.jh'
+ : JSON.stringify(false),
'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV),
'process.env.SOURCEGRAPH_PUBLIC_PATH': JSON.stringify(SOURCEGRAPH_PUBLIC_PATH),
'process.env.GITLAB_WEB_IDE_PUBLIC_PATH': JSON.stringify(GITLAB_WEB_IDE_PUBLIC_PATH),
diff --git a/yarn.lock b/yarn.lock
index 85562cecd65..7717df04177 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -8760,7 +8760,7 @@ loader-runner@^4.1.0:
resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1"
integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==
-loader-utils@^1.0.0, loader-utils@^1.0.2, loader-utils@^1.1.0, loader-utils@^1.2.3:
+loader-utils@^1.0.2, loader-utils@^1.1.0, loader-utils@^1.2.3:
version "1.4.2"
resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.2.tgz#29a957f3a63973883eb684f10ffd3d151fec01a3"
integrity sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg==
@@ -11655,14 +11655,6 @@ saxes@^5.0.1:
dependencies:
xmlchars "^2.2.0"
-schema-utils@^0.4.0:
- version "0.4.7"
- resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.4.7.tgz#ba74f597d2be2ea880131746ee17d0a093c68187"
- integrity sha512-v/iwU6wvwGK8HbU9yi3/nhGzP0yGSuhQMzL6ySiec1FSrZZDkhm4noOSWzrNFo/jEc+SJY6jRTwuwbSXJPDUnQ==
- dependencies:
- ajv "^6.1.0"
- ajv-keywords "^3.1.0"
-
schema-utils@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770"
@@ -13860,13 +13852,13 @@ worker-farm@^1.7.0:
dependencies:
errno "~0.1.7"
-worker-loader@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/worker-loader/-/worker-loader-2.0.0.tgz#45fda3ef76aca815771a89107399ee4119b430ac"
- integrity sha512-tnvNp4K3KQOpfRnD20m8xltE3eWh89Ye+5oj7wXEEHKac1P4oZ6p9oTj8/8ExqoSBnk9nu5Pr4nKfQ1hn2APJw==
+worker-loader@^3.0.8:
+ version "3.0.8"
+ resolved "https://registry.yarnpkg.com/worker-loader/-/worker-loader-3.0.8.tgz#5fc5cda4a3d3163d9c274a4e3a811ce8b60dbb37"
+ integrity sha512-XQyQkIFeRVC7f7uRhFdNMe/iJOdO6zxAaR3EWbDp45v3mDhrTi+++oswKNxShUNjPC/1xUp5DB29YKLhFo129g==
dependencies:
- loader-utils "^1.0.0"
- schema-utils "^0.4.0"
+ loader-utils "^2.0.0"
+ schema-utils "^3.0.0"
"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0:
version "7.0.0"