Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Enable Rubocop cop to omit parentheses from one-line method calls #2778

Merged
18 commits merged on Feb 15, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
5 changes: 5 additions & 0 deletions google-cloud-bigquery/.rubocop.yml
Expand Up @@ -13,6 +13,11 @@ Documentation:

Style/StringLiterals:
EnforcedStyle: double_quotes
Style/MethodCallWithArgsParentheses:
Enabled: true
EnforcedStyle: omit_parentheses
AllowParenthesesInMultilineCall: true
AllowParenthesesInCamelCaseMethod: true
Style/MethodDefParentheses:
EnforcedStyle: require_no_parentheses
Style/NumericLiterals:
Expand Down
14 changes: 14 additions & 0 deletions google-cloud-bigquery/lib/google/cloud/bigquery/convert.rb
Expand Up @@ -360,6 +360,20 @@ def self.derive_source_format path
return "DATASTORE_BACKUP" if path.end_with? ".backup_info"
nil
end

##
# @private
#
# Converts a timestamp expressed in milliseconds since the Unix epoch
# into a Ruby Time instance.
#
# @return [Time, nil] The converted Time, or nil when the given
#   argument is nil.
def self.millis_to_time time_millis
  return nil unless time_millis
  # Integer() raises on non-numeric input rather than silently
  # coercing, surfacing bad API payloads early.
  seconds = Integer(time_millis) / 1000.0
  ::Time.at seconds
end
end

# rubocop:enable Metrics/ModuleLength
Expand Down
6 changes: 3 additions & 3 deletions google-cloud-bigquery/lib/google/cloud/bigquery/data.rb
Expand Up @@ -439,9 +439,9 @@ def next
#
def all request_limit: nil
request_limit = request_limit.to_i if request_limit
unless block_given?
return enum_for(:all, request_limit: request_limit)
end

return enum_for :all, request_limit: request_limit unless block_given?

results = self
loop do
results.each { |r| yield r }
Expand Down
15 changes: 4 additions & 11 deletions google-cloud-bigquery/lib/google/cloud/bigquery/dataset.rb
Expand Up @@ -20,6 +20,7 @@
require "google/cloud/bigquery/external"
require "google/cloud/bigquery/dataset/list"
require "google/cloud/bigquery/dataset/access"
require "google/cloud/bigquery/convert"
require "google/apis/bigquery_v2"

module Google
Expand Down Expand Up @@ -237,11 +238,7 @@ def default_expiration= new_default_expiration
def created_at
return nil if reference?
ensure_full_data!
begin
::Time.at(Integer(@gapi.creation_time) / 1000.0)
rescue StandardError
nil
end
Convert.millis_to_time @gapi.creation_time
end

##
Expand All @@ -255,11 +252,7 @@ def created_at
def modified_at
return nil if reference?
ensure_full_data!
begin
::Time.at(Integer(@gapi.last_modified_time) / 1000.0)
rescue StandardError
nil
end
Convert.millis_to_time @gapi.last_modified_time
end

##
Expand Down Expand Up @@ -2261,7 +2254,7 @@ def udfs_gapi array_or_str
return [] if array_or_str.nil?
Array(array_or_str).map do |uri_or_code|
resource = Google::Apis::BigqueryV2::UserDefinedFunctionResource.new
if uri_or_code.start_with?("gs://")
if uri_or_code.start_with? "gs://"
resource.resource_uri = uri_or_code
else
resource.inline_code = uri_or_code
Expand Down
Expand Up @@ -935,10 +935,10 @@ def validate_view view

# @private
def add_access_role_scope_value role, scope, value
role = validate_role(role)
role = validate_role role
scope = validate_scope scope
# If scope is special group, make sure value is in the list
value = validate_special_group(value) if scope == :special_group
value = validate_special_group value if scope == :special_group
# Remove any rules of this scope and value
@rules.reject!(&find_by_scope_and_value(scope, value))
# Add new rule for this role, scope, and value
Expand All @@ -949,7 +949,7 @@ def add_access_role_scope_value role, scope, value
# @private
def add_access_view value
# scope is view, make sure value is in the right format
value = validate_view(value)
value = validate_view value
# Remove existing view rule, if any
@rules.reject!(&find_view(value))
# Add new rule for this role, scope, and value
Expand All @@ -959,10 +959,10 @@ def add_access_view value

# @private
def remove_access_role_scope_value role, scope, value
role = validate_role(role)
role = validate_role role
scope = validate_scope scope
# If scope is special group, make sure value is in the list
value = validate_special_group(value) if scope == :special_group
value = validate_special_group value if scope == :special_group
# Remove any rules of this role, scope, and value
@rules.reject!(
&find_by_role_and_scope_and_value(role, scope, value)
Expand All @@ -972,17 +972,17 @@ def remove_access_role_scope_value role, scope, value
# @private
def remove_access_view value
# scope is view, make sure value is in the right format
value = validate_view(value)
value = validate_view value
# Remove existing view rule, if any
@rules.reject!(&find_view(value))
end

# @private
def lookup_access_role_scope_value role, scope, value
role = validate_role(role)
role = validate_role role
scope = validate_scope scope
# If scope is special group, make sure value is in the list
value = validate_special_group(value) if scope == :special_group
value = validate_special_group value if scope == :special_group
# Detect any rules of this role, scope, and value
!(!@rules.detect(
&find_by_role_and_scope_and_value(role, scope, value)
Expand All @@ -992,7 +992,7 @@ def lookup_access_role_scope_value role, scope, value
# @private
def lookup_access_view value
# scope is view, make sure value is in the right format
value = validate_view(value)
value = validate_view value
# Detect view rule, if any
!(!@rules.detect(&find_view(value)))
end
Expand Down
Expand Up @@ -124,7 +124,7 @@ def next
def all request_limit: nil
request_limit = request_limit.to_i if request_limit
unless block_given?
return enum_for(:all, request_limit: request_limit)
return enum_for :all, request_limit: request_limit
end
results = self
loop do
Expand Down
13 changes: 4 additions & 9 deletions google-cloud-bigquery/lib/google/cloud/bigquery/job.rb
Expand Up @@ -16,6 +16,7 @@
require "google/cloud/errors"
require "google/cloud/bigquery/service"
require "google/cloud/bigquery/job/list"
require "google/cloud/bigquery/convert"
require "json"

module Google
Expand Down Expand Up @@ -172,9 +173,7 @@ def failed?
# @return [Time, nil] The creation time from the job statistics.
#
def created_at
::Time.at(Integer(@gapi.statistics.creation_time) / 1000.0)
rescue StandardError
nil
Convert.millis_to_time @gapi.statistics.creation_time
end

##
Expand All @@ -185,9 +184,7 @@ def created_at
# @return [Time, nil] The start time from the job statistics.
#
def started_at
::Time.at(Integer(@gapi.statistics.start_time) / 1000.0)
rescue StandardError
nil
Convert.millis_to_time @gapi.statistics.start_time
end

##
Expand All @@ -197,9 +194,7 @@ def started_at
# @return [Time, nil] The end time from the job statistics.
#
def ended_at
::Time.at(Integer(@gapi.statistics.end_time) / 1000.0)
rescue StandardError
nil
Convert.millis_to_time @gapi.statistics.end_time
end

##
Expand Down
Expand Up @@ -124,7 +124,7 @@ def next
def all request_limit: nil
request_limit = request_limit.to_i if request_limit
unless block_given?
return enum_for(:all, request_limit: request_limit)
return enum_for :all, request_limit: request_limit
end
results = self
loop do
Expand Down
Expand Up @@ -125,7 +125,7 @@ def next
def all request_limit: nil
request_limit = request_limit.to_i if request_limit
unless block_given?
return enum_for(:all, request_limit: request_limit)
return enum_for :all, request_limit: request_limit
end
results = self
loop do
Expand Down
Expand Up @@ -1178,7 +1178,7 @@ def udfs_gapi_from array_or_str
Array(array_or_str).map do |uri_or_code|
resource =
Google::Apis::BigqueryV2::UserDefinedFunctionResource.new
if uri_or_code.start_with?("gs://")
if uri_or_code.start_with? "gs://"
resource.resource_uri = uri_or_code
else
resource.inline_code = uri_or_code
Expand Down
16 changes: 10 additions & 6 deletions google-cloud-bigquery/lib/google/cloud/bigquery/service.rb
Expand Up @@ -90,7 +90,9 @@ def list_datasets options = {}
# Returns the dataset specified by datasetID.
def get_dataset dataset_id
# The get operation is considered idempotent
execute(backoff: true) { service.get_dataset @project, dataset_id }
execute backoff: true do
service.get_dataset @project, dataset_id
end
end

##
Expand Down Expand Up @@ -253,7 +255,7 @@ def list_jobs options = {}
# Cancel the job specified by jobId.
def cancel_job job_id, location: nil
# The BigQuery team has told us cancelling is considered idempotent
execute(backoff: true) do
execute backoff: true do
service.cancel_job @project, job_id, location: location
end
end
Expand All @@ -262,7 +264,7 @@ def cancel_job job_id, location: nil
# Returns the job specified by jobID.
def get_job job_id, location: nil
# The get operation is considered idempotent
execute(backoff: true) do
execute backoff: true do
service.get_job @project, job_id, location: location
end
end
Expand All @@ -273,7 +275,9 @@ def insert_job config, location: nil
configuration: config
)
# Jobs have generated id, so this operation is considered idempotent
execute(backoff: true) { service.insert_job @project, job_object }
execute backoff: true do
service.insert_job @project, job_object
end
end

def query_job query_job_gapi
Expand Down Expand Up @@ -414,7 +418,7 @@ def inspect

# Generate a random string similar to the BigQuery service job IDs.
def generate_id
SecureRandom.urlsafe_base64(21)
SecureRandom.urlsafe_base64 21
end

def mime_type_for file
Expand All @@ -432,7 +436,7 @@ def execute backoff: nil
yield
end
rescue Google::Apis::Error => e
raise Google::Cloud::Error.from_error(e)
raise Google::Cloud::Error.from_error e
end

class Backoff
Expand Down
29 changes: 7 additions & 22 deletions google-cloud-bigquery/lib/google/cloud/bigquery/table.rb
Expand Up @@ -22,6 +22,7 @@
require "google/cloud/bigquery/external"
require "google/cloud/bigquery/insert_response"
require "google/cloud/bigquery/table/async_inserter"
require "google/cloud/bigquery/convert"
require "google/apis/bigquery_v2"

module Google
Expand Down Expand Up @@ -558,11 +559,7 @@ def rows_count
def created_at
return nil if reference?
ensure_full_data!
begin
::Time.at(Integer(@gapi.creation_time) / 1000.0)
rescue StandardError
nil
end
Convert.millis_to_time @gapi.creation_time
end

##
Expand All @@ -578,11 +575,7 @@ def created_at
def expires_at
return nil if reference?
ensure_full_data!
begin
::Time.at(Integer(@gapi.expiration_time) / 1000.0)
rescue StandardError
nil
end
Convert.millis_to_time @gapi.expiration_time
end

##
Expand All @@ -596,11 +589,7 @@ def expires_at
def modified_at
return nil if reference?
ensure_full_data!
begin
::Time.at(Integer(@gapi.last_modified_time) / 1000.0)
rescue StandardError
nil
end
Convert.millis_to_time @gapi.last_modified_time
end

##
Expand Down Expand Up @@ -987,11 +976,7 @@ def buffer_oldest_at
ensure_full_data!
return nil unless @gapi.streaming_buffer
oldest_entry_time = @gapi.streaming_buffer.oldest_entry_time
begin
::Time.at(Integer(oldest_entry_time) / 1000.0)
rescue StandardError
nil
end
Convert.millis_to_time oldest_entry_time
end

##
Expand Down Expand Up @@ -1747,7 +1732,7 @@ def load_job files, format: nil, create: nil, write: nil,

job_gapi = updater.to_gapi

return load_local(files, job_gapi) if local_file? files
return load_local files, job_gapi if local_file? files
load_storage files, job_gapi
end

Expand Down Expand Up @@ -2490,7 +2475,7 @@ def udfs_gapi array_or_str
return [] if array_or_str.nil?
Array(array_or_str).map do |uri_or_code|
resource = Google::Apis::BigqueryV2::UserDefinedFunctionResource.new
if uri_or_code.start_with?("gs://")
if uri_or_code.start_with? "gs://"
resource.resource_uri = uri_or_code
else
resource.inline_code = uri_or_code
Expand Down
Expand Up @@ -231,7 +231,8 @@ def run_background
time_since_first_publish = ::Time.now - @batch_created_at
if time_since_first_publish < @interval
# still waiting for the interval to insert the batch...
@cond.wait(@interval - time_since_first_publish)
timeout = @interval - time_since_first_publish
@cond.wait timeout
else
# interval met, insert the batch...
push_batch_request!
Expand All @@ -247,7 +248,7 @@ def push_batch_request!
orig_rows = @batch.rows
json_rows = @batch.json_rows
insert_ids = @batch.insert_ids
Concurrent::Future.new(executor: @thread_pool) do
Concurrent::Future.new executor: @thread_pool do
begin
raise ArgumentError, "No rows provided" if json_rows.empty?
options = { skip_invalid: @skip_invalid,
Expand Down
Expand Up @@ -134,7 +134,7 @@ def next
def all request_limit: nil
request_limit = request_limit.to_i if request_limit
unless block_given?
return enum_for(:all, request_limit: request_limit)
return enum_for :all, request_limit: request_limit
end
results = self
loop do
Expand Down