gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2020-01-31 12:08:53 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-01-31 12:08:53 +0300
commit    fd3a95f07ae9cd78fecffcfa5de4494f933a7808 (patch)
tree      a38a8abb0afb14aa396edd30137ddf45e71d2713
parent    6a7005feed2e88568f42627e7190ff5c4f2aa8d3 (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  app/assets/javascripts/boards/components/issue_card_inner.vue | 2
-rw-r--r--  app/assets/javascripts/lib/utils/datetime_range.js | 97
-rw-r--r--  app/assets/javascripts/monitoring/utils.js | 38
-rw-r--r--  app/serializers/README.md | 2
-rw-r--r--  app/workers/concerns/worker_context.rb | 38
-rw-r--r--  changelogs/unreleased/197894.yml | 5
-rw-r--r--  doc/administration/raketasks/project_import_export.md | 35
-rw-r--r--  doc/api/project_import_export.md | 32
-rw-r--r--  doc/fixtures/gitlab_tanuki.png | bin 0 -> 2545 bytes
-rw-r--r--  doc/user/project/settings/import_export.md | 73
-rw-r--r--  lib/gitlab/batch_worker_context.rb | 32
-rw-r--r--  lib/gitlab/sidekiq_middleware.rb | 1
-rw-r--r--  lib/gitlab/sidekiq_middleware/worker_context.rb | 15
-rw-r--r--  lib/gitlab/sidekiq_middleware/worker_context/client.rb | 23
-rw-r--r--  lib/gitlab/sidekiq_middleware/worker_context/server.rb | 10
-rw-r--r--  spec/controllers/help_controller_spec.rb | 2
-rw-r--r--  spec/frontend/boards/issue_card_spec.js | 25
-rw-r--r--  spec/frontend/lib/utils/datetime_range_spec.js | 327
-rw-r--r--  spec/frontend/monitoring/utils_spec.js | 97
-rw-r--r--  spec/lib/gitlab/batch_worker_context_spec.rb | 28
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb | 45
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware_spec.rb | 13
-rw-r--r--  spec/support/db_cleaner.rb | 4
-rw-r--r--  spec/workers/concerns/worker_context_spec.rb | 78
24 files changed, 857 insertions, 165 deletions
diff --git a/app/assets/javascripts/boards/components/issue_card_inner.vue b/app/assets/javascripts/boards/components/issue_card_inner.vue
index 7f7510545c6..0e0d1e64f4a 100644
--- a/app/assets/javascripts/boards/components/issue_card_inner.vue
+++ b/app/assets/javascripts/boards/components/issue_card_inner.vue
@@ -233,7 +233,7 @@ export default {
:key="assignee.id"
:link-href="assigneeUrl(assignee)"
:img-alt="avatarUrlTitle(assignee)"
- :img-src="assignee.avatar"
+ :img-src="assignee.avatar || assignee.avatar_url"
:img-size="24"
class="js-no-trigger"
tooltip-placement="bottom"
diff --git a/app/assets/javascripts/lib/utils/datetime_range.js b/app/assets/javascripts/lib/utils/datetime_range.js
index 53b8702afa7..6d4e21cf386 100644
--- a/app/assets/javascripts/lib/utils/datetime_range.js
+++ b/app/assets/javascripts/lib/utils/datetime_range.js
@@ -1,4 +1,5 @@
import dateformat from 'dateformat';
+import { pick, omit, isEqual, isEmpty } from 'lodash';
import { secondsToMilliseconds } from './datetime_utility';
const MINIMUM_DATE = new Date(0);
@@ -221,3 +222,99 @@ export function getRangeType(range) {
*/
export const convertToFixedRange = dateTimeRange =>
handlers[getRangeType(dateTimeRange)](dateTimeRange);
+
+/**
+ * Returns a copy of the object with only the time range
+ * properties relevant to time range calculation.
+ *
+ * The retained properties are:
+ * - 'start'
+ * - 'end'
+ * - 'anchor'
+ * - 'duration'
+ * - 'direction': if the direction is already the default, it is removed.
+ *
+ * @param {Object} timeRange - A time range object
+ * @returns Copy of time range
+ */
+const pruneTimeRange = timeRange => {
+ const res = pick(timeRange, ['start', 'end', 'anchor', 'duration', 'direction']);
+ if (res.direction === DEFAULT_DIRECTION) {
+ return omit(res, 'direction');
+ }
+ return res;
+};
+
+/**
+ * Returns true if the time ranges are equal according to
+ * the time range calculation properties
+ *
+ * @param {Object} timeRange - A time range object
+ * @param {Object} other - Time range object to compare with.
+ * @returns true if the time ranges are equal, false otherwise
+ */
+export const isEqualTimeRanges = (timeRange, other) => {
+ const tr1 = pruneTimeRange(timeRange);
+ const tr2 = pruneTimeRange(other);
+ return isEqual(tr1, tr2);
+};
+
+/**
+ * Searches for a time range in an array of time ranges using
+ * only the properties relevant to time range calculation.
+ *
+ * @param {Object} timeRange - Time range to search (needle)
+ * @param {Array} timeRanges - Array of time ranges (haystack)
+ */
+export const findTimeRange = (timeRange, timeRanges) =>
+ timeRanges.find(element => isEqualTimeRanges(element, timeRange));
+
+// Time Ranges as URL Parameters Utils
+
+/**
+ * List of possible time ranges parameters
+ */
+export const timeRangeParamNames = ['start', 'end', 'anchor', 'duration_seconds', 'direction'];
+
+/**
+ * Converts a valid time range to a flat key-value pairs object.
+ *
+ * Duration is flattened to avoid nested objects.
+ *
+ * @param {Object} timeRange - A time range
+ * @returns {Object} Key-value pairs object that can be used as parameters in a URL.
+ */
+export const timeRangeToParams = timeRange => {
+ let params = pruneTimeRange(timeRange);
+ if (timeRange.duration) {
+    const durationParams = {};
+    Object.keys(timeRange.duration).forEach(key => {
+      durationParams[`duration_${key}`] = timeRange.duration[key].toString();
+    });
+    params = { ...durationParams, ...params };
+ params = omit(params, 'duration');
+ }
+ return params;
+};
+
+/**
+ * Converts a valid set of flat params to a time range object
+ *
+ * Parameters that are not part of a time range object are ignored.
+ *
+ * @param {Object} params - Key-value pairs object.
+ */
+export const timeRangeFromParams = params => {
+ const timeRangeParams = pick(params, timeRangeParamNames);
+ let range = Object.entries(timeRangeParams).reduce((acc, [key, val]) => {
+ // unflatten duration
+ if (key.startsWith('duration_')) {
+ acc.duration = acc.duration || {};
+ acc.duration[key.slice('duration_'.length)] = parseInt(val, 10);
+ return acc;
+ }
+ return { [key]: val, ...acc };
+ }, {});
+ range = pruneTimeRange(range);
+ return !isEmpty(range) ? range : null;
+};
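Taken together, these helpers round-trip a time range through flat, URL-friendly parameters. Below is a minimal usage sketch; the `lastTenMinutes` value is made up for illustration and is not part of this change:

```javascript
import {
  isEqualTimeRanges,
  findTimeRange,
  timeRangeToParams,
  timeRangeFromParams,
} from '~/lib/utils/datetime_range';

// A rolling range: "the last 10 minutes", with no fixed start/end.
const lastTenMinutes = { label: 'Last 10 minutes', duration: { seconds: 600 } };

// Flatten the range into key-value pairs; `duration` becomes `duration_seconds`.
const params = timeRangeToParams(lastTenMinutes);
// => { duration_seconds: '600' }

// Round-trip the params back into a range object; the label is not preserved.
const restored = timeRangeFromParams(params);
// => { duration: { seconds: 600 } }

// Equality ignores presentation-only properties such as `label`.
isEqualTimeRanges(lastTenMinutes, restored); // => true

// Locate the original entry in a list of predefined ranges.
findTimeRange(restored, [lastTenMinutes]); // => lastTenMinutes
```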
diff --git a/app/assets/javascripts/monitoring/utils.js b/app/assets/javascripts/monitoring/utils.js
index 3847d885f9a..915812596c6 100644
--- a/app/assets/javascripts/monitoring/utils.js
+++ b/app/assets/javascripts/monitoring/utils.js
@@ -1,3 +1,10 @@
+import { queryToObject, mergeUrlParams, removeParams } from '~/lib/utils/url_utility';
+import {
+ timeRangeParamNames,
+ timeRangeFromParams,
+ timeRangeToParams,
+} from '~/lib/utils/datetime_range';
+
/**
* This method is used to validate if the graph data format for a chart component
* that needs a time series as a response from a prometheus query (query_range) is
@@ -93,4 +100,35 @@ export const graphDataValidatorForAnomalyValues = graphData => {
);
};
+/**
+ * Returns a time range from the current URL params
+ *
+ * @returns {Object} The time range defined by the
+ * current URL, reading from `window.location.search`
+ */
+export const timeRangeFromUrl = (search = window.location.search) => {
+ const params = queryToObject(search);
+ return timeRangeFromParams(params);
+};
+
+/**
+ * Returns a URL with no time range parameters, based on the current URL.
+ *
+ * @returns {String} A new URL with the time range parameters removed
+ */
+export const removeTimeRangeParams = (url = window.location.href) =>
+ removeParams(timeRangeParamNames, url);
+
+/**
+ * Returns a URL for a different time range, based on the
+ * current URL and the given time range.
+ *
+ * @returns {String} A new URL with the given time range applied
+ */
+export const timeRangeToUrl = (timeRange, url = window.location.href) => {
+ const toUrl = removeTimeRangeParams(url);
+ const params = timeRangeToParams(timeRange);
+ return mergeUrlParams(params, toUrl);
+};
+
export default {};
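Together with the `datetime_range` helpers above, these utilities let the metrics dashboard keep its selected time range in sync with the query string. A rough sketch of that flow, assuming a browser-like environment; the example URL is hypothetical:

```javascript
import { timeRangeFromUrl, timeRangeToUrl, removeTimeRangeParams } from '~/monitoring/utils';

// Hypothetical dashboard URL, used only for illustration.
const dashboardUrl = 'https://gitlab.example.com/group/project/-/environments/1/metrics';

// Encode a fixed range into the URL as `start`/`end` query parameters.
const withRange = timeRangeToUrl(
  { start: '2020-01-01T00:00:00.000Z', end: '2020-01-31T23:59:00.000Z' },
  dashboardUrl,
);

// On page load, read the range back from the query string
// (by default the helper reads `window.location.search`).
const selectedRange = timeRangeFromUrl(new URL(withRange).search);

// Strip every time range parameter, for example before building a permalink.
const plainUrl = removeTimeRangeParams(withRange);
```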
diff --git a/app/serializers/README.md b/app/serializers/README.md
index 93b21786015..2cbe6f9d263 100644
--- a/app/serializers/README.md
+++ b/app/serializers/README.md
@@ -64,7 +64,7 @@ A new serializer should inherit from a `BaseSerializer` class. It is necessary
to specify which serialization entity will be used to serialize a resource.
```ruby
-class MyResourceSerializer < BaseSerialize
+class MyResourceSerializer < BaseSerializer
entity MyResourceEntity
end
```
diff --git a/app/workers/concerns/worker_context.rb b/app/workers/concerns/worker_context.rb
index d85565e3446..ca006eaad5d 100644
--- a/app/workers/concerns/worker_context.rb
+++ b/app/workers/concerns/worker_context.rb
@@ -12,8 +12,46 @@ module WorkerContext
@worker_context || superclass_context
end
+ def bulk_perform_async_with_contexts(objects, arguments_proc:, context_proc:)
+ with_batch_contexts(objects, arguments_proc, context_proc) do |arguments|
+ bulk_perform_async(arguments)
+ end
+ end
+
+ def bulk_perform_in_with_contexts(delay, objects, arguments_proc:, context_proc:)
+ with_batch_contexts(objects, arguments_proc, context_proc) do |arguments|
+ bulk_perform_in(delay, arguments)
+ end
+ end
+
+ def context_for_arguments(args)
+ batch_context&.context_for(args)
+ end
+
private
+ BATCH_CONTEXT_KEY = "#{name}_batch_context"
+
+ def batch_context
+ Thread.current[BATCH_CONTEXT_KEY]
+ end
+
+ def batch_context=(value)
+ Thread.current[BATCH_CONTEXT_KEY] = value
+ end
+
+ def with_batch_contexts(objects, arguments_proc, context_proc)
+ self.batch_context = Gitlab::BatchWorkerContext.new(
+ objects,
+ arguments_proc: arguments_proc,
+ context_proc: context_proc
+ )
+
+ yield(batch_context.arguments)
+ ensure
+ self.batch_context = nil
+ end
+
def superclass_context
return unless superclass.include?(WorkerContext)
diff --git a/changelogs/unreleased/197894.yml b/changelogs/unreleased/197894.yml
new file mode 100644
index 00000000000..7cdcdf4be95
--- /dev/null
+++ b/changelogs/unreleased/197894.yml
@@ -0,0 +1,5 @@
+---
+title: MVC for assignees avatar disappearing when opening issue sidebar in board
+merge_request:
+author: Oregand
+type: fixed
diff --git a/doc/administration/raketasks/project_import_export.md b/doc/administration/raketasks/project_import_export.md
index f782a24e654..05045fe45ab 100644
--- a/doc/administration/raketasks/project_import_export.md
+++ b/doc/administration/raketasks/project_import_export.md
@@ -1,17 +1,14 @@
# Project import/export administration **(CORE ONLY)**
->**Note:**
->
-> - [Introduced][ce-3050] in GitLab 8.9.
-> - Importing will not be possible if the import instance version is lower
-> than that of the exporter.
-> - For existing installations, the project import option has to be enabled in
-> application settings (`/admin/application_settings`) under 'Import sources'.
-> - The exports are stored in a temporary [shared directory][tmp] and are deleted
-> every 24 hours by a specific worker.
-> - ImportExport can use object storage automatically starting from GitLab 11.3
-
-The GitLab Import/Export version can be checked by using:
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/3050) in GitLab 8.9.
+> - From GitLab 11.3, import/export can use object storage automatically.
+
+See also:
+
+- [Project import/export documentation](../../user/project/settings/import_export.md).
+- [Project import/export API](../../api/project_import_export.md).
+
+The GitLab import/export version can be checked by using the following command:
```shell
# Omnibus installations
@@ -21,7 +18,7 @@ sudo gitlab-rake gitlab:import_export:version
bundle exec rake gitlab:import_export:version RAILS_ENV=production
```
-The current list of DB tables that will get exported can be listed by using:
+The list of DB tables that will be exported can be displayed by using the following command:
```shell
# Omnibus installations
@@ -31,5 +28,13 @@ sudo gitlab-rake gitlab:import_export:data
bundle exec rake gitlab:import_export:data RAILS_ENV=production
```
-[ce-3050]: https://gitlab.com/gitlab-org/gitlab-foss/issues/3050
-[tmp]: ../../development/shared_files.md
+## Important notes
+
+Note the following:
+
+- Importing is not possible if the version of the import instance is older than that of the exporter.
+- The project import option must be enabled in
+ application settings (`/admin/application_settings`) under **Import sources**, which is available
+ under **{admin}** **Admin Area >** **{settings}** **Settings > Visibility and access controls**.
+- The exports are stored in a temporary [shared directory](../../development/shared_files.md)
+ and are deleted every 24 hours by a specific worker.
diff --git a/doc/api/project_import_export.md b/doc/api/project_import_export.md
index b239c59bc66..d1aaa01d37c 100644
--- a/doc/api/project_import_export.md
+++ b/doc/api/project_import_export.md
@@ -1,20 +1,22 @@
# Project import/export API
-> [Introduced][ce-41899] in GitLab 10.6.
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/41899) in GitLab 10.6.
-See also the [project import/export documentation](../user/project/settings/import_export.md).
+See also:
+
+- [Project import/export documentation](../user/project/settings/import_export.md).
+- [Project import/export administration rake tasks](../administration/raketasks/project_import_export.md). **(CORE ONLY)**
## Schedule an export
Start a new export.
-The endpoint also accepts an `upload` param. This param is a hash that contains
+The endpoint also accepts an `upload` parameter. This parameter is a hash that contains
all the necessary information to upload the exported project to a web server or
to any S3-compatible platform. At the moment we only support binary
data file uploads to the final server.
-If the `upload` params is present, `upload[url]` param is required.
- (**Note:** This feature was introduced in GitLab 10.7)
+From GitLab 10.7, the `upload[url]` parameter is required if the `upload` parameter is present.
```text
POST /projects/:id/export
@@ -56,8 +58,14 @@ GET /projects/:id/export
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/projects/1/export
```
-Status can be one of `none`, `started`, `after_export_action` or `finished`. The
-`after_export_action` state represents that the export process has been completed successfully and
+Status can be one of:
+
+- `none`
+- `started`
+- `after_export_action`
+- `finished`
+
+The `after_export_action` state represents that the export process has been completed successfully and
the platform is performing some actions on the resulted file. For example, sending
an email notifying the user to download the file, uploading the exported file
to a web server, etc.
@@ -178,7 +186,13 @@ GET /projects/:id/import
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/projects/1/import
```
-Status can be one of `none`, `scheduled`, `failed`, `started`, or `finished`.
+Status can be one of:
+
+- `none`
+- `scheduled`
+- `failed`
+- `started`
+- `finished`
If the status is `failed`, it will include the import error message under `import_error`.
@@ -194,5 +208,3 @@ If the status is `failed`, it will include the import error message under `impor
"import_status": "started"
}
```
-
-[ce-41899]: https://gitlab.com/gitlab-org/gitlab-foss/issues/41899
diff --git a/doc/fixtures/gitlab_tanuki.png b/doc/fixtures/gitlab_tanuki.png
new file mode 100644
index 00000000000..551dd6ce2ce
--- /dev/null
+++ b/doc/fixtures/gitlab_tanuki.png
Binary files differ
diff --git a/doc/user/project/settings/import_export.md b/doc/user/project/settings/import_export.md
index 2c90a17f37e..c4a4f4f14ee 100644
--- a/doc/user/project/settings/import_export.md
+++ b/doc/user/project/settings/import_export.md
@@ -1,40 +1,44 @@
# Project import/export
->**Notes:**
->
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/3050) in GitLab 8.9.
-> - Importing will not be possible if the import instance version differs from
-> that of the exporter.
-> - For GitLab admins, please read through
-> [Project import/export administration](../../../administration/raketasks/project_import_export.md).
-> - For existing installations, the project import option has to be enabled in
-> application settings (`/admin/application_settings`) under 'Import sources'.
-> Ask your administrator if you don't see the **GitLab export** button when
-> creating a new project.
-> - Starting with GitLab 10.0, administrators can disable the project export option
-> on the GitLab instance in application settings (`/admin/application_settings`)
-> under 'Visibility and Access Controls'.
-> - You can find some useful raketasks if you are an administrator in the
-> [import_export](../../../administration/raketasks/project_import_export.md) raketask.
-> - The exports are stored in a temporary [shared directory](../../../development/shared_files.md)
-> and are deleted every 24 hours by a specific worker.
-> - Group members will get exported as project members, as long as the user has
-> maintainer or admin access to the group where the exported project lives. An admin
-> in the import side is required to map the users, based on email or username.
-> Otherwise, a supplementary comment is left to mention the original author and
-> the MRs, notes or issues will be owned by the importer.
-> - Project members with owner access will get imported as maintainers.
-> - Control project Import/Export with the [API](../../../api/project_import_export.md).
-> - If an imported project contains merge requests originated from forks,
-> then new branches associated with such merge requests will be created
-> within a project during the import/export. Thus, the number of branches
-> in the exported project could be bigger than in the original project.
-
-Existing projects running on any GitLab instance or GitLab.com can be exported
-with all their related data and be moved into a new GitLab instance.
+> - From GitLab 10.0, administrators can disable the project export option on the GitLab instance.
+
+Existing projects running on any GitLab instance or GitLab.com can be exported with all their related
+data and be moved into a new GitLab instance.
+
+See also:
+
+- [Project import/export API](../../../api/project_import_export.md).
+- [Project import/export administration rake tasks](../../../administration/raketasks/project_import_export.md). **(CORE ONLY)**
+
+## Important notes
+
+Note the following:
+
+- Importing is not possible if the import instance version differs from
+ that of the exporter.
+- The project import option must be enabled in
+  application settings (`/admin/application_settings`) under **Import sources**, which is
+ available under **{admin}** **Admin Area >** **{settings}** **Settings > Visibility and access controls**.
+ Ask your administrator if you don't see the **GitLab export** button when
+ creating a new project.
+- The exports are stored in a temporary [shared directory](../../../development/shared_files.md)
+ and are deleted every 24 hours by a specific worker.
+- Group members are exported as project members, as long as the user has
+ maintainer or admin access to the group where the exported project lives. An admin
+  on the import side is required to map the users, based on email or username.
+ Otherwise, a supplementary comment is left to mention the original author and
+ the MRs, notes, or issues will be owned by the importer.
+- Project members with owner access will be imported as maintainers.
+- If an imported project contains merge requests originating from forks,
+ then new branches associated with such merge requests will be created
+ within a project during the import/export. Thus, the number of branches
+ in the exported project could be bigger than in the original project.
## Version history
+The following table lists updates to Import/Export:
+
| GitLab version | Import/Export version |
| ---------------- | --------------------- |
| 11.1 to current | 0.2.4 |
@@ -52,9 +56,8 @@ with all their related data and be moved into a new GitLab instance.
| 8.9.5 | 0.1.1 |
| 8.9.0 | 0.1.0 |
- > The table reflects what GitLab version we updated the Import/Export version at.
- > For instance, 8.10.3 and 8.11 will have the same Import/Export version (0.1.3)
- > and the exports between them will be compatible.
+For example, 8.10.3 and 8.11 will have the same Import/Export version (0.1.3)
+and the exports between them will be compatible.
## Exported contents
@@ -88,7 +91,7 @@ For more details on the specific data persisted in a project export, see the
1. Go to your project's homepage.
-1. Click **Settings** in the sidebar.
+1. Click **{settings}** **Settings** in the sidebar.
1. Scroll down to find the **Export project** button:
diff --git a/lib/gitlab/batch_worker_context.rb b/lib/gitlab/batch_worker_context.rb
new file mode 100644
index 00000000000..0589206fefc
--- /dev/null
+++ b/lib/gitlab/batch_worker_context.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module Gitlab
+ class BatchWorkerContext
+ def initialize(objects, arguments_proc:, context_proc:)
+ @objects = objects
+ @arguments_proc = arguments_proc
+ @context_proc = context_proc
+ end
+
+ def arguments
+ context_by_arguments.keys
+ end
+
+ def context_for(arguments)
+ context_by_arguments[arguments]
+ end
+
+ private
+
+ attr_reader :objects, :arguments_proc, :context_proc
+
+ def context_by_arguments
+ @context_by_arguments ||= objects.each_with_object({}) do |object, result|
+ arguments = Array.wrap(arguments_proc.call(object))
+ context = Gitlab::ApplicationContext.new(context_proc.call(object))
+
+ result[arguments] = context
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_middleware.rb b/lib/gitlab/sidekiq_middleware.rb
index b19853a1702..439d45b7a14 100644
--- a/lib/gitlab/sidekiq_middleware.rb
+++ b/lib/gitlab/sidekiq_middleware.rb
@@ -29,6 +29,7 @@ module Gitlab
lambda do |chain|
chain.add Gitlab::SidekiqStatus::ClientMiddleware
chain.add Gitlab::SidekiqMiddleware::ClientMetrics
+ chain.add Gitlab::SidekiqMiddleware::WorkerContext::Client # needs to be before the Labkit middleware
chain.add Labkit::Middleware::Sidekiq::Client
end
end
diff --git a/lib/gitlab/sidekiq_middleware/worker_context.rb b/lib/gitlab/sidekiq_middleware/worker_context.rb
new file mode 100644
index 00000000000..897a9211948
--- /dev/null
+++ b/lib/gitlab/sidekiq_middleware/worker_context.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module SidekiqMiddleware
+ module WorkerContext
+ private
+
+ def wrap_in_optional_context(context_or_nil, &block)
+ return yield unless context_or_nil
+
+ context_or_nil.use(&block)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_middleware/worker_context/client.rb b/lib/gitlab/sidekiq_middleware/worker_context/client.rb
new file mode 100644
index 00000000000..0eb52179db2
--- /dev/null
+++ b/lib/gitlab/sidekiq_middleware/worker_context/client.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module SidekiqMiddleware
+ module WorkerContext
+ class Client
+ include Gitlab::SidekiqMiddleware::WorkerContext
+
+ def call(worker_class_or_name, job, _queue, _redis_pool, &block)
+ worker_class = worker_class_or_name.to_s.safe_constantize
+
+ # Mailers can't be constantized like this
+ return yield unless worker_class
+ return yield unless worker_class.include?(::ApplicationWorker)
+
+ context_for_args = worker_class.context_for_arguments(job['args'])
+
+ wrap_in_optional_context(context_for_args, &block)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_middleware/worker_context/server.rb b/lib/gitlab/sidekiq_middleware/worker_context/server.rb
index 29d98ad16a9..d2d84742c17 100644
--- a/lib/gitlab/sidekiq_middleware/worker_context/server.rb
+++ b/lib/gitlab/sidekiq_middleware/worker_context/server.rb
@@ -4,6 +4,8 @@ module Gitlab
module SidekiqMiddleware
module WorkerContext
class Server
+ include Gitlab::SidekiqMiddleware::WorkerContext
+
def call(worker, job, _queue, &block)
worker_class = worker.class
@@ -13,14 +15,6 @@ module Gitlab
# Use the context defined on the class level as a base context
wrap_in_optional_context(worker_class.get_worker_context, &block)
end
-
- private
-
- def wrap_in_optional_context(context, &block)
- return yield unless context
-
- context.use(&block)
- end
end
end
end
diff --git a/spec/controllers/help_controller_spec.rb b/spec/controllers/help_controller_spec.rb
index 69b5605c0e5..3fce296ef90 100644
--- a/spec/controllers/help_controller_spec.rb
+++ b/spec/controllers/help_controller_spec.rb
@@ -111,7 +111,7 @@ describe HelpController do
it 'renders the raw file' do
get :show,
params: {
- path: 'user/project/img/labels_default_v12_1'
+ path: 'fixtures/gitlab_tanuki'
},
format: :png
expect(response).to be_successful
diff --git a/spec/frontend/boards/issue_card_spec.js b/spec/frontend/boards/issue_card_spec.js
index 08cffed5f00..526cdb81ac6 100644
--- a/spec/frontend/boards/issue_card_spec.js
+++ b/spec/frontend/boards/issue_card_spec.js
@@ -97,6 +97,9 @@ describe('Issue card component', () => {
issue: {
...wrapper.props('issue'),
assignees: [user],
+ updateData(newData) {
+ Object.assign(this, newData);
+ },
},
});
@@ -118,6 +121,28 @@ describe('Issue card component', () => {
it('renders avatar', () => {
expect(wrapper.find('.board-card-assignee img').exists()).toBe(true);
});
+
+ it('renders the avatar using avatar_url property', done => {
+ wrapper.props('issue').updateData({
+ ...wrapper.props('issue'),
+ assignees: [
+ {
+ id: '1',
+ name: 'test',
+ state: 'active',
+ username: 'test_name',
+ avatar_url: 'test_image_from_avatar_url',
+ },
+ ],
+ });
+
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('.board-card-assignee img').attributes('src')).toBe(
+ 'test_image_from_avatar_url?width=24',
+ );
+ done();
+ });
+ });
});
describe('assignee default avatar', () => {
diff --git a/spec/frontend/lib/utils/datetime_range_spec.js b/spec/frontend/lib/utils/datetime_range_spec.js
index 13eb69e1761..8b1f284615d 100644
--- a/spec/frontend/lib/utils/datetime_range_spec.js
+++ b/spec/frontend/lib/utils/datetime_range_spec.js
@@ -1,10 +1,46 @@
import _ from 'lodash';
-import { getRangeType, convertToFixedRange } from '~/lib/utils/datetime_range';
+import {
+ getRangeType,
+ convertToFixedRange,
+ isEqualTimeRanges,
+ findTimeRange,
+ timeRangeToParams,
+ timeRangeFromParams,
+} from '~/lib/utils/datetime_range';
const MOCK_NOW = Date.UTC(2020, 0, 23, 20);
const MOCK_NOW_ISO_STRING = new Date(MOCK_NOW).toISOString();
+const mockFixedRange = {
+ label: 'January 2020',
+ start: '2020-01-01T00:00:00.000Z',
+ end: '2020-01-31T23:59:00.000Z',
+};
+
+const mockAnchoredRange = {
+ label: 'First two minutes of 2020',
+ anchor: '2020-01-01T00:00:00.000Z',
+ direction: 'after',
+ duration: {
+ seconds: 60 * 2,
+ },
+};
+
+const mockRollingRange = {
+ label: 'Next 2 minutes',
+ direction: 'after',
+ duration: {
+ seconds: 60 * 2,
+ },
+};
+
+const mockOpenRange = {
+ label: '2020 so far',
+ anchor: '2020-01-01T00:00:00.000Z',
+ direction: 'after',
+};
+
describe('Date time range utils', () => {
describe('getRangeType', () => {
it('infers correctly the range type from the input object', () => {
@@ -43,38 +79,28 @@ describe('Date time range utils', () => {
});
describe('When a fixed range is input', () => {
- const defaultFixedRange = {
- start: '2020-01-01T00:00:00.000Z',
- end: '2020-01-31T23:59:00.000Z',
- label: 'January 2020',
- };
-
- const mockFixedRange = params => ({ ...defaultFixedRange, ...params });
-
it('converts a fixed range to an equal fixed range', () => {
- const aFixedRange = mockFixedRange();
-
- expect(convertToFixedRange(aFixedRange)).toEqual({
- start: defaultFixedRange.start,
- end: defaultFixedRange.end,
+ expect(convertToFixedRange(mockFixedRange)).toEqual({
+ start: mockFixedRange.start,
+ end: mockFixedRange.end,
});
});
it('throws an error when fixed range does not contain an end time', () => {
- const aFixedRangeMissingEnd = _.omit(mockFixedRange(), 'end');
+ const aFixedRangeMissingEnd = _.omit(mockFixedRange, 'end');
expect(() => convertToFixedRange(aFixedRangeMissingEnd)).toThrow();
});
it('throws an error when fixed range does not contain a start time', () => {
- const aFixedRangeMissingStart = _.omit(mockFixedRange(), 'start');
+ const aFixedRangeMissingStart = _.omit(mockFixedRange, 'start');
expect(() => convertToFixedRange(aFixedRangeMissingStart)).toThrow();
});
it('throws an error when the dates cannot be parsed', () => {
- const wrongStart = mockFixedRange({ start: 'I_CANNOT_BE_PARSED' });
- const wrongEnd = mockFixedRange({ end: 'I_CANNOT_BE_PARSED' });
+ const wrongStart = { ...mockFixedRange, start: 'I_CANNOT_BE_PARSED' };
+ const wrongEnd = { ...mockFixedRange, end: 'I_CANNOT_BE_PARSED' };
expect(() => convertToFixedRange(wrongStart)).toThrow();
expect(() => convertToFixedRange(wrongEnd)).toThrow();
@@ -82,97 +108,61 @@ describe('Date time range utils', () => {
});
describe('When an anchored range is input', () => {
- const defaultAnchoredRange = {
- anchor: '2020-01-01T00:00:00.000Z',
- direction: 'after',
- duration: {
- seconds: 60 * 2,
- },
- label: 'First two minutes of 2020',
- };
- const mockAnchoredRange = params => ({ ...defaultAnchoredRange, ...params });
-
it('converts to a fixed range', () => {
- const anAnchoredRange = mockAnchoredRange();
-
- expect(convertToFixedRange(anAnchoredRange)).toEqual({
+ expect(convertToFixedRange(mockAnchoredRange)).toEqual({
start: '2020-01-01T00:00:00.000Z',
end: '2020-01-01T00:02:00.000Z',
});
});
it('converts to a fixed range with a `before` direction', () => {
- const anAnchoredRange = mockAnchoredRange({ direction: 'before' });
-
- expect(convertToFixedRange(anAnchoredRange)).toEqual({
+ expect(convertToFixedRange({ ...mockAnchoredRange, direction: 'before' })).toEqual({
start: '2019-12-31T23:58:00.000Z',
end: '2020-01-01T00:00:00.000Z',
});
});
it('converts to a fixed range without an explicit direction, defaulting to `before`', () => {
- const anAnchoredRange = _.omit(mockAnchoredRange(), 'direction');
+ const defaultDirectionRange = _.omit(mockAnchoredRange, 'direction');
- expect(convertToFixedRange(anAnchoredRange)).toEqual({
+ expect(convertToFixedRange(defaultDirectionRange)).toEqual({
start: '2019-12-31T23:58:00.000Z',
end: '2020-01-01T00:00:00.000Z',
});
});
it('throws an error when the anchor cannot be parsed', () => {
- const wrongAnchor = mockAnchoredRange({ anchor: 'I_CANNOT_BE_PARSED' });
+ const wrongAnchor = { ...mockAnchoredRange, anchor: 'I_CANNOT_BE_PARSED' };
+
expect(() => convertToFixedRange(wrongAnchor)).toThrow();
});
});
describe('when a rolling range is input', () => {
it('converts to a fixed range', () => {
- const aRollingRange = {
- direction: 'after',
- duration: {
- seconds: 60 * 2,
- },
- label: 'Next 2 minutes',
- };
-
- expect(convertToFixedRange(aRollingRange)).toEqual({
+ expect(convertToFixedRange(mockRollingRange)).toEqual({
start: '2020-01-23T20:00:00.000Z',
end: '2020-01-23T20:02:00.000Z',
});
});
it('converts to a fixed range with an implicit `before` direction', () => {
- const aRollingRangeWithNoDirection = {
- duration: {
- seconds: 60 * 2,
- },
- label: 'Last 2 minutes',
- };
+ const noDirection = _.omit(mockRollingRange, 'direction');
- expect(convertToFixedRange(aRollingRangeWithNoDirection)).toEqual({
+ expect(convertToFixedRange(noDirection)).toEqual({
start: '2020-01-23T19:58:00.000Z',
end: '2020-01-23T20:00:00.000Z',
});
});
it('throws an error when the duration is not in the right format', () => {
- const wrongDuration = {
- direction: 'before',
- duration: {
- minutes: 20,
- },
- label: 'Last 20 minutes',
- };
+ const wrongDuration = { ...mockRollingRange, duration: { minutes: 20 } };
expect(() => convertToFixedRange(wrongDuration)).toThrow();
});
it('throws an error when the anchor is not valid', () => {
- const wrongAnchor = {
- anchor: 'CAN_T_PARSE_THIS',
- direction: 'after',
- label: '2020 so far',
- };
+ const wrongAnchor = { ...mockRollingRange, anchor: 'CAN_T_PARSE_THIS' };
expect(() => convertToFixedRange(wrongAnchor)).toThrow();
});
@@ -180,52 +170,213 @@ describe('Date time range utils', () => {
describe('when an open range is input', () => {
it('converts to a fixed range with an `after` direction', () => {
- const soFar2020 = {
- anchor: '2020-01-01T00:00:00.000Z',
- direction: 'after',
- label: '2020 so far',
- };
-
- expect(convertToFixedRange(soFar2020)).toEqual({
+ expect(convertToFixedRange(mockOpenRange)).toEqual({
start: '2020-01-01T00:00:00.000Z',
end: '2020-01-23T20:00:00.000Z',
});
});
it('converts to a fixed range with the explicit `before` direction', () => {
- const before2020 = {
- anchor: '2020-01-01T00:00:00.000Z',
- direction: 'before',
- label: 'Before 2020',
- };
+ const beforeOpenRange = { ...mockOpenRange, direction: 'before' };
- expect(convertToFixedRange(before2020)).toEqual({
+ expect(convertToFixedRange(beforeOpenRange)).toEqual({
start: '1970-01-01T00:00:00.000Z',
end: '2020-01-01T00:00:00.000Z',
});
});
it('converts to a fixed range with the implicit `before` direction', () => {
- const alsoBefore2020 = {
- anchor: '2020-01-01T00:00:00.000Z',
- label: 'Before 2020',
- };
+ const noDirectionOpenRange = _.omit(mockOpenRange, 'direction');
- expect(convertToFixedRange(alsoBefore2020)).toEqual({
+ expect(convertToFixedRange(noDirectionOpenRange)).toEqual({
start: '1970-01-01T00:00:00.000Z',
end: '2020-01-01T00:00:00.000Z',
});
});
it('throws an error when the anchor cannot be parsed', () => {
- const wrongAnchor = {
- anchor: 'CAN_T_PARSE_THIS',
- direction: 'after',
- label: '2020 so far',
- };
+ const wrongAnchor = { ...mockOpenRange, anchor: 'CAN_T_PARSE_THIS' };
expect(() => convertToFixedRange(wrongAnchor)).toThrow();
});
});
});
+
+ describe('isEqualTimeRanges', () => {
+  it('compares only the relevant properties', () => {
+ expect(
+ isEqualTimeRanges(
+ {
+ ...mockFixedRange,
+ label: 'A label',
+ default: true,
+ },
+ {
+ ...mockFixedRange,
+ label: 'Another label',
+ default: false,
+ anotherKey: 'anotherValue',
+ },
+ ),
+ ).toBe(true);
+
+ expect(
+ isEqualTimeRanges(
+ {
+ ...mockAnchoredRange,
+ label: 'A label',
+ default: true,
+ },
+ {
+ ...mockAnchoredRange,
+ anotherKey: 'anotherValue',
+ },
+ ),
+ ).toBe(true);
+ });
+ });
+
+ describe('findTimeRange', () => {
+ const timeRanges = [
+ {
+ label: 'Before 2020',
+ anchor: '2020-01-01T00:00:00.000Z',
+ },
+ {
+ label: 'Last 30 minutes',
+ duration: { seconds: 60 * 30 },
+ },
+ {
+ label: 'In 2019',
+ start: '2019-01-01T00:00:00.000Z',
+ end: '2019-12-31T12:59:59.999Z',
+ },
+ {
+ label: 'Next 2 minutes',
+ direction: 'after',
+ duration: {
+ seconds: 60 * 2,
+ },
+ },
+ ];
+
+ it('finds a time range', () => {
+ const tr0 = {
+ anchor: '2020-01-01T00:00:00.000Z',
+ };
+ expect(findTimeRange(tr0, timeRanges)).toBe(timeRanges[0]);
+
+ const tr1 = {
+ duration: { seconds: 60 * 30 },
+ };
+ expect(findTimeRange(tr1, timeRanges)).toBe(timeRanges[1]);
+
+ const tr1Direction = {
+ direction: 'before',
+ duration: {
+ seconds: 60 * 30,
+ },
+ };
+ expect(findTimeRange(tr1Direction, timeRanges)).toBe(timeRanges[1]);
+
+ const tr2 = {
+ someOtherLabel: 'Added arbitrarily',
+ start: '2019-01-01T00:00:00.000Z',
+ end: '2019-12-31T12:59:59.999Z',
+ };
+ expect(findTimeRange(tr2, timeRanges)).toBe(timeRanges[2]);
+
+ const tr3 = {
+ direction: 'after',
+ duration: {
+ seconds: 60 * 2,
+ },
+ };
+ expect(findTimeRange(tr3, timeRanges)).toBe(timeRanges[3]);
+ });
+
+  it('does not find a missing time range', () => {
+ const nonExistant = {
+ direction: 'before',
+ duration: {
+ seconds: 200,
+ },
+ };
+ expect(findTimeRange(nonExistant, timeRanges)).toBeUndefined();
+ });
+ });
+
+ describe('conversion to/from params', () => {
+ const mockFixedParams = {
+ start: '2020-01-01T00:00:00.000Z',
+ end: '2020-01-31T23:59:00.000Z',
+ };
+
+ const mockAnchoredParams = {
+ anchor: '2020-01-01T00:00:00.000Z',
+ direction: 'after',
+ duration_seconds: '120',
+ };
+
+ const mockRollingParams = {
+ direction: 'after',
+ duration_seconds: '120',
+ };
+
+ describe('timeRangeToParams', () => {
+ it('converts fixed ranges to params', () => {
+ expect(timeRangeToParams(mockFixedRange)).toEqual(mockFixedParams);
+ });
+
+ it('converts anchored ranges to params', () => {
+ expect(timeRangeToParams(mockAnchoredRange)).toEqual(mockAnchoredParams);
+ });
+
+ it('converts rolling ranges to params', () => {
+ expect(timeRangeToParams(mockRollingRange)).toEqual(mockRollingParams);
+ });
+ });
+
+ describe('timeRangeFromParams', () => {
+ it('converts fixed ranges from params', () => {
+ const params = { ...mockFixedParams, other_param: 'other_value' };
+ const expectedRange = _.omit(mockFixedRange, 'label');
+
+ expect(timeRangeFromParams(params)).toEqual(expectedRange);
+ });
+
+      it('converts anchored ranges from params', () => {
+        const expectedRange = _.omit(mockAnchoredRange, 'label');
+
+        expect(timeRangeFromParams(mockAnchoredParams)).toEqual(expectedRange);
+ });
+
+ it('converts rolling ranges from params', () => {
+ const params = { ...mockRollingParams, other_param: 'other_value' };
+ const expectedRange = _.omit(mockRollingRange, 'label');
+
+ expect(timeRangeFromParams(params)).toEqual(expectedRange);
+ });
+
+ it('converts rolling ranges from params with a default direction', () => {
+ const params = {
+ ...mockRollingParams,
+ direction: 'before',
+ other_param: 'other_value',
+ };
+ const expectedRange = _.omit(mockRollingRange, 'label', 'direction');
+
+ expect(timeRangeFromParams(params)).toEqual(expectedRange);
+ });
+
+    it('converts to null when no relevant params are given', () => {
+ const range = {
+ useless_param_1: 'value1',
+ useless_param_2: 'value2',
+ };
+
+ expect(timeRangeFromParams(range)).toBe(null);
+ });
+ });
+ });
});
diff --git a/spec/frontend/monitoring/utils_spec.js b/spec/frontend/monitoring/utils_spec.js
index 9df48eb0ad3..2d9417bf971 100644
--- a/spec/frontend/monitoring/utils_spec.js
+++ b/spec/frontend/monitoring/utils_spec.js
@@ -1,13 +1,35 @@
import * as monitoringUtils from '~/monitoring/utils';
+import { queryToObject, mergeUrlParams, removeParams } from '~/lib/utils/url_utility';
import {
+ mockHost,
+ mockProjectDir,
graphDataPrometheusQuery,
graphDataPrometheusQueryRange,
anomalyMockGraphData,
} from './mock_data';
+jest.mock('~/lib/utils/url_utility');
+
+const mockPath = `${mockHost}${mockProjectDir}/-/environments/29/metrics`;
+
+const generatedLink = 'http://chart.link.com';
+
+const chartTitle = 'Some metric chart';
+
+const range = {
+ start: '2019-01-01T00:00:00.000Z',
+ end: '2019-01-10T00:00:00.000Z',
+};
+
+const rollingRange = {
+ duration: { seconds: 120 },
+};
+
describe('monitoring/utils', () => {
- const generatedLink = 'http://chart.link.com';
- const chartTitle = 'Some metric chart';
+ afterEach(() => {
+ mergeUrlParams.mockReset();
+ queryToObject.mockReset();
+ });
describe('trackGenerateLinkToChartEventOptions', () => {
it('should return Cluster Monitoring options if located on Cluster Health Dashboard', () => {
@@ -117,4 +139,75 @@ describe('monitoring/utils', () => {
expect(monitoringUtils.graphDataValidatorForAnomalyValues(fourMetrics)).toBe(false);
});
});
+
+ describe('timeRangeFromUrl', () => {
+ const { timeRangeFromUrl } = monitoringUtils;
+
+    it('returns a fixed range when the query contains `start` and `end` parameters', () => {
+ queryToObject.mockReturnValueOnce(range);
+
+ expect(timeRangeFromUrl()).toEqual(range);
+ });
+
+    it('returns a rolling range when the query contains a `duration_seconds` parameter', () => {
+ const { seconds } = rollingRange.duration;
+
+ queryToObject.mockReturnValueOnce({
+ dashboard: '.gitlab/dashboard/my_dashboard.yml',
+ duration_seconds: `${seconds}`,
+ });
+
+ expect(timeRangeFromUrl()).toEqual(rollingRange);
+ });
+
+    it('returns null when no time range parameters are given', () => {
+ const params = {
+ dashboard: '.gitlab/dashboards/custom_dashboard.yml',
+ param1: 'value1',
+ param2: 'value2',
+ };
+
+ expect(timeRangeFromUrl(params, mockPath)).toBe(null);
+ });
+ });
+
+ describe('removeTimeRangeParams', () => {
+ const { removeTimeRangeParams } = monitoringUtils;
+
+    it('removes `start` and `end` parameters from a given URL', () => {
+ removeParams.mockReturnValueOnce(mockPath);
+
+ expect(removeTimeRangeParams(`${mockPath}?start=${range.start}&end=${range.end}`)).toEqual(
+ mockPath,
+ );
+ });
+ });
+
+ describe('timeRangeToUrl', () => {
+ const { timeRangeToUrl } = monitoringUtils;
+
+    it('returns a URL with `start` and `end` parameters for a fixed range', () => {
+ const toUrl = `${mockPath}?start=${range.start}&end=${range.end}`;
+ const fromUrl = mockPath;
+
+ removeParams.mockReturnValueOnce(fromUrl);
+ mergeUrlParams.mockReturnValueOnce(toUrl);
+
+ expect(timeRangeToUrl(range)).toEqual(toUrl);
+ expect(mergeUrlParams).toHaveBeenCalledWith(range, fromUrl);
+ });
+
+    it('returns a URL with a `duration_seconds` parameter for a rolling range', () => {
+ const { seconds } = rollingRange.duration;
+
+ const toUrl = `${mockPath}?duration_seconds=${seconds}`;
+ const fromUrl = mockPath;
+
+ removeParams.mockReturnValueOnce(fromUrl);
+ mergeUrlParams.mockReturnValueOnce(toUrl);
+
+ expect(timeRangeToUrl(rollingRange)).toEqual(toUrl);
+ expect(mergeUrlParams).toHaveBeenCalledWith({ duration_seconds: `${seconds}` }, fromUrl);
+ });
+ });
});
diff --git a/spec/lib/gitlab/batch_worker_context_spec.rb b/spec/lib/gitlab/batch_worker_context_spec.rb
new file mode 100644
index 00000000000..0ba30287ae5
--- /dev/null
+++ b/spec/lib/gitlab/batch_worker_context_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BatchWorkerContext do
+ subject(:batch_context) do
+ described_class.new(
+ %w(hello world),
+ arguments_proc: -> (word) { word },
+ context_proc: -> (word) { { user: build_stubbed(:user, username: word) } }
+ )
+ end
+
+ describe "#arguments" do
+ it "returns all the expected arguments in arrays" do
+ expect(batch_context.arguments).to eq([%w(hello), %w(world)])
+ end
+ end
+
+ describe "#context_for" do
+ it "returns the correct application context for the arguments" do
+ context = batch_context.context_for(%w(world))
+
+ expect(context).to be_a(Gitlab::ApplicationContext)
+ expect(context.to_lazy_hash[:user].call).to eq("world")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
new file mode 100644
index 00000000000..9cb89b1bc10
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'TestWithContextWorker'
+ end
+
+ include ApplicationWorker
+
+ def self.job_for_args(args)
+ jobs.find { |job| job['args'] == args }
+ end
+
+ def perform(*args)
+ end
+ end
+ end
+
+ before do
+ stub_const('TestWithContextWorker', worker_class)
+ end
+
+ describe "#call" do
+ it 'applies a context for jobs scheduled in batch' do
+ user_per_job = { 'job1' => build_stubbed(:user, username: 'user-1'),
+ 'job2' => build_stubbed(:user, username: 'user-2') }
+
+ TestWithContextWorker.bulk_perform_async_with_contexts(
+ %w(job1 job2),
+ arguments_proc: -> (name) { [name, 1, 2, 3] },
+ context_proc: -> (name) { { user: user_per_job[name] } }
+ )
+
+ job1 = TestWithContextWorker.job_for_args(['job1', 1, 2, 3])
+ job2 = TestWithContextWorker.job_for_args(['job2', 1, 2, 3])
+
+ expect(job1['meta.user']).to eq(user_per_job['job1'].username)
+ expect(job2['meta.user']).to eq(user_per_job['job2'].username)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index b3c0a5b04f0..e8dcbbd2ee1 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -110,6 +110,14 @@ describe Gitlab::SidekiqMiddleware do
let(:queue) { 'default' }
let(:redis_pool) { Sidekiq.redis_pool }
let(:middleware_expected_args) { [worker_class_arg, job, queue, redis_pool] }
+ let(:expected_middlewares) do
+ [
+ Gitlab::SidekiqStatus::ClientMiddleware,
+ Gitlab::SidekiqMiddleware::ClientMetrics,
+ Gitlab::SidekiqMiddleware::WorkerContext::Client,
+ Labkit::Middleware::Sidekiq::Client
+ ]
+ end
before do
described_class.client_configurator.call(chain)
@@ -120,8 +128,9 @@ describe Gitlab::SidekiqMiddleware do
# this will prevent the full middleware chain from being executed.
# This test ensures that this does not happen
it "invokes the chain" do
- expect_any_instance_of(Gitlab::SidekiqStatus::ClientMiddleware).to receive(:call).with(*middleware_expected_args).once.and_call_original
- expect_any_instance_of(Labkit::Middleware::Sidekiq::Client).to receive(:call).with(*middleware_expected_args).once.and_call_original
+      expected_middlewares.each do |middleware|
+ expect_any_instance_of(middleware).to receive(:call).with(*middleware_expected_args).once.ordered.and_call_original
+ end
expect { |b| chain.invoke(worker_class_arg, job, queue, redis_pool, &b) }.to yield_control.once
end
diff --git a/spec/support/db_cleaner.rb b/spec/support/db_cleaner.rb
index 5da707b11f9..77e1f6bcaa3 100644
--- a/spec/support/db_cleaner.rb
+++ b/spec/support/db_cleaner.rb
@@ -1,7 +1,9 @@
# frozen_string_literal: true
module DbCleaner
- def delete_from_all_tables!(except: nil)
+ def delete_from_all_tables!(except: [])
+ except << 'ar_internal_metadata'
+
DatabaseCleaner.clean_with(:deletion, cache_tables: false, except: except)
end
diff --git a/spec/workers/concerns/worker_context_spec.rb b/spec/workers/concerns/worker_context_spec.rb
index a7d0ba2b8bd..97a88eecd73 100644
--- a/spec/workers/concerns/worker_context_spec.rb
+++ b/spec/workers/concerns/worker_context_spec.rb
@@ -5,7 +5,11 @@ require 'spec_helper'
describe WorkerContext do
let(:worker) do
Class.new do
- include WorkerContext
+ def self.name
+ "TestWorker"
+ end
+
+ include ApplicationWorker
end
end
@@ -24,6 +28,78 @@ describe WorkerContext do
end
end
+ shared_examples 'tracking bulk scheduling contexts' do
+ describe "context contents" do
+ before do
+ # stub clearing the contexts, so we can check what's inside
+ allow(worker).to receive(:batch_context=).and_call_original
+ allow(worker).to receive(:batch_context=).with(nil)
+ end
+
+ it 'keeps track of the context per key to schedule' do
+ subject
+
+ expect(worker.context_for_arguments(["hello"])).to be_a(Gitlab::ApplicationContext)
+ end
+
+ it 'does not share contexts across threads' do
+ t1_context = nil
+ t2_context = nil
+
+ Thread.new do
+ subject
+
+ t1_context = worker.context_for_arguments(["hello"])
+ end.join
+ Thread.new do
+ t2_context = worker.context_for_arguments(["hello"])
+ end.join
+
+ expect(t1_context).to be_a(Gitlab::ApplicationContext)
+ expect(t2_context).to be_nil
+ end
+ end
+
+ it 'clears the contexts' do
+ subject
+
+ expect(worker.__send__(:batch_context)).to be_nil
+ end
+ end
+
+ describe '.bulk_perform_async_with_contexts' do
+ subject do
+ worker.bulk_perform_async_with_contexts(%w(hello world),
+ context_proc: -> (_) { { user: build_stubbed(:user) } },
+ arguments_proc: -> (word) { word })
+ end
+
+ it 'calls bulk_perform_async with the arguments' do
+ expect(worker).to receive(:bulk_perform_async).with([["hello"], ["world"]])
+
+ subject
+ end
+
+ it_behaves_like 'tracking bulk scheduling contexts'
+ end
+
+ describe '.bulk_perform_in_with_contexts' do
+ subject do
+ worker.bulk_perform_in_with_contexts(10.minutes,
+ %w(hello world),
+ context_proc: -> (_) { { user: build_stubbed(:user) } },
+ arguments_proc: -> (word) { word })
+ end
+
+ it 'calls bulk_perform_in with the arguments and delay' do
+ expect(worker).to receive(:bulk_perform_in).with(10.minutes, [["hello"], ["world"]])
+
+ subject
+ end
+
+ it_behaves_like 'tracking bulk scheduling contexts'
+ end
+
describe '#with_context' do
it 'allows modifying context when the job is running' do
worker.new.with_context(user: build_stubbed(:user, username: 'jane-doe')) do