I would like the following hash output:
{ 0 => [0,1,2], 1 => [0,1,2], 2 => [0,1,2],
3 => [3,4,5], 4 => [3,4,5], 5 => [3,4,5],
6 => [6,7,8], 7 => [6,7,8], 8 => [6,7,8]
}
Intuitively I could just hard-code these values, but I am struggling to figure out how to dynamically assign each hash key to the appropriate array value. For instance, perhaps by creating a 9.times loop? Hard-coded, the hash would look like this:
{ 0 => (0..2).to_a, 1 => (0..2).to_a, 2 => (0..2).to_a,
3 => (3..5).to_a, 4 => (3..5).to_a, 5 => (3..5).to_a,
6 => (6..8).to_a, 7 => (6..8).to_a, 8 => (6..8).to_a
}
Thanks for any guidance
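One way to act on the question's own 9.times idea (a minimal sketch, not taken from the answers below; a fixed group size of 3 is assumed):
h = {}
9.times { |i| h[i] = ((i / 3 * 3)..(i / 3 * 3 + 2)).to_a }
h #=> {0=>[0, 1, 2], 1=>[0, 1, 2], 2=>[0, 1, 2], 3=>[3, 4, 5], ..., 8=>[6, 7, 8]}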
With a dynamic upper bound:
count, split = 9, 3
0.upto(count - 1).map do |i|
  [i, [*(i / split * split)...(i / split) * split + split]]
end.to_h
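For illustration, changing the two variables regroups the keys accordingly (a quick check, not part of the original answer):
count, split = 8, 4
0.upto(count - 1).map do |i|
  [i, [*(i / split * split)...(i / split) * split + split]]
end.to_h
#=> {0=>[0, 1, 2, 3], 1=>[0, 1, 2, 3], 2=>[0, 1, 2, 3], 3=>[0, 1, 2, 3],
#    4=>[4, 5, 6, 7], 5=>[4, 5, 6, 7], 6=>[4, 5, 6, 7], 7=>[4, 5, 6, 7]}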
Using modulo operations (%) you can calculate the values of the subarrays:
(0..8).map { |i| [i, (i - i%3 .. i - i%3 + 2).to_a] }.to_h
# => {0=>[0, 1, 2], 1=>[0, 1, 2], 2=>[0, 1, 2], 3=>[3, 4, 5], 4=>[3, 4, 5], 5=>[3, 4, 5], 6=>[6, 7, 8], 7=>[6, 7, 8], 8=>[6, 7, 8]}
Maybe something like this:
[0..2, 3..5, 6..8].each.with_object({}) do |range, result|
  range.each do |ind|
    result[ind] = range.to_a
  end
end
dummy_data = {}
(0..8).to_a.each_slice(3).map{ |m| m.each{|v| dummy_data[v] = m }}
It generates the expected dummy data:
dummy_data
=> {0=>[0, 1, 2], 1=>[0, 1, 2], 2=>[0, 1, 2], 3=>[3, 4, 5], 4=>[3, 4, 5], 5=>[3, 4, 5], 6=>[6, 7, 8], 7=>[6, 7, 8], 8=>[6, 7, 8]}
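The same each_slice grouping can also be written without the external dummy_data accumulator (a variant sketch, not part of the original answer):
(0..8).each_slice(3).flat_map { |slice| slice.map { |v| [v, slice] } }.to_h
#=> {0=>[0, 1, 2], 1=>[0, 1, 2], 2=>[0, 1, 2], 3=>[3, 4, 5], 4=>[3, 4, 5],
#    5=>[3, 4, 5], 6=>[6, 7, 8], 7=>[6, 7, 8], 8=>[6, 7, 8]}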
def make_da_hash(n)
  a = (0..n*n-1).to_a
  a.zip(a.each_slice(n).flat_map { |e| [e]*n }).to_h
end
make_da_hash(3)
#=> {0=>[0, 1, 2], 1=>[0, 1, 2], 2=>[0, 1, 2],
# 3=>[3, 4, 5], 4=>[3, 4, 5], 5=>[3, 4, 5],
# 6=>[6, 7, 8], 7=>[6, 7, 8], 8=>[6, 7, 8]}
make_da_hash(4)
#=> {0=>[0, 1, 2, 3], 1=>[0, 1, 2, 3], 2=>[0, 1, 2, 3], 3=>[0, 1, 2, 3],
# 4=>[4, 5, 6, 7], 5=>[4, 5, 6, 7], 6=>[4, 5, 6, 7], 7=>[4, 5, 6, 7],
# 8=>[8, 9, 10, 11], 9=>[8, 9, 10, 11], 10=>[8, 9, 10, 11],
# 11=>[8, 9, 10, 11], 12=>[12, 13, 14, 15], 13=>[12, 13, 14, 15],
# 14=>[12, 13, 14, 15], 15=>[12, 13, 14, 15]}
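The argument passed to zip may be easier to follow in isolation; for n = 3 it is each slice repeated n times (an illustrative breakdown, not part of the original answer):
n = 3
a = (0..n*n-1).to_a
a.each_slice(n).flat_map { |e| [e]*n }
#=> [[0, 1, 2], [0, 1, 2], [0, 1, 2], [3, 4, 5], [3, 4, 5], [3, 4, 5],
#    [6, 7, 8], [6, 7, 8], [6, 7, 8]]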
{}.merge(*Array.new(3){|i| (0..8).group_by{|n| n / 3 * 3 + i}})
Output:
{
0=>[0, 1, 2],
3=>[3, 4, 5],
6=>[6, 7, 8],
1=>[0, 1, 2],
4=>[3, 4, 5],
7=>[6, 7, 8],
2=>[0, 1, 2],
5=>[3, 4, 5],
8=>[6, 7, 8]
}
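The merged keys come out in insertion order (0, 3, 6, 1, ...); if numeric key order matters, the result can be sorted afterwards (a small addition, not part of the original answer):
{}.merge(*Array.new(3){|i| (0..8).group_by{|n| n / 3 * 3 + i}}).sort.to_h
#=> {0=>[0, 1, 2], 1=>[0, 1, 2], 2=>[0, 1, 2], 3=>[3, 4, 5], 4=>[3, 4, 5],
#    5=>[3, 4, 5], 6=>[6, 7, 8], 7=>[6, 7, 8], 8=>[6, 7, 8]}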
From:
arr1 = np.array([ [1, 2, 3], [4, 5, 6], [7, 8, 9] ])
To:
arr1 = np.array([ [0, 1, 2, 3], [0, 4, 5, 6], [0, 7, 8, 9] ])
You can try something like this with numpy.full:
x = 0
new = np.full((arr1.shape[0], arr1.shape[1] + 1), x)
new[:, 1:] = arr1
Output
new
array([[0, 1, 2, 3],
       [0, 4, 5, 6],
       [0, 7, 8, 9]])
Note that you can assign any value to x.
Your (3,3) 2d array:
In [100]: arr1 = np.array([ [1, 2, 3], [4, 5, 6], [7, 8, 9] ])
In [101]: arr1
Out[101]:
array([[1, 2, 3],
       [4, 5, 6],
       [7, 8, 9]])
A new (3,4) array:
In [102]: np.concatenate((np.zeros((3,1),int),arr1), axis=1)
Out[102]:
array([[0, 1, 2, 3],
       [0, 4, 5, 6],
       [0, 7, 8, 9]])
Any other (3,1) array (or even a (3,n)) could be added "at the start" like this.
Is there any method to split an array like this?
[1, 2, 3, 4, 5, 6, 7, 8, 9].split(3, 4, 2)
#=> [[1, 2, 3],[4, 5, 6, 7],[8, 9]]
Immutable version with λ:
▶ splitter = ->(array, *parts) do
    parts.reduce([[], 0]) do |acc, i|
      right = acc.last + i
      [acc.first << (acc.last...right), right]
    end.first.map { |r| array[r] }
  end
#⇒ #<Proc:0x0055ae3d9ae7c8@(pry):18 (lambda)>
▶ splitter.((1..9).to_a, 3, 4, 2)
#⇒ [[1, 2, 3], [4, 5, 6, 7], [8, 9]]
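The reduce step builds the index ranges that are then used to slice the array; run on its own it looks like this (an illustration, not part of the original answer):
[3, 4, 2].reduce([[], 0]) do |acc, i|
  right = acc.last + i
  [acc.first << (acc.last...right), right]
end.first
#=> [0...3, 3...7, 7...9]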
No, there is none, but you can easily write one yourself.
class Array
  def in_groups_of_n(*sizes)
    sizes.map(&method(:shift))
  end
end
Example:
arr = [1, 2, 3, 4, 5, 6, 7, 8, 9]
arr.in_groups_of_n(3, 4, 2)
# => [[1, 2, 3], [4, 5, 6, 7], [8, 9]]
In case you want a non-destructive version, you can use dup:
class Array
  def in_groups_of_n(*sizes)
    duplicate = dup
    sizes.map { |size| duplicate.shift(size) }
  end
end
arr = [1, 2, 3, 4, 5, 6, 7, 8, 9]
arr.in_groups_of_n(3,4,2)
# => [[1, 2, 3], [4, 5, 6, 7], [8, 9]]
arr
# => [1, 2, 3, 4, 5, 6, 7, 8, 9]
Here's a naive Array implementation:
class Array
  def multi_split(*sizes)
    r = []
    e = self.each
    sizes.each do |size|
      t = []
      size.times do
        t << e.next
      end
      r << t
    end
    r
  end
end
p [1, 2, 3, 4, 5, 6, 7, 8, 9].multi_split(3, 4, 2)
# [[1, 2, 3], [4, 5, 6, 7], [8, 9]]
Stefan mentioned that it might make sense to implement it on Enumerable:
module Enumerable
  def multi_split(*sizes)
    Enumerator.new do |yielder|
      e = self.each
      sizes.each do |size|
        yielder << Array.new(size) { e.next }
      end
    end
  end
end
p [1, 2, 3, 4, 5, 6, 7, 8, 9].multi_split(3, 4, 2).to_a
# [[1, 2, 3], [4, 5, 6, 7], [8, 9]]
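Because the Enumerable version only pulls as many elements as the requested sizes need, it also works on non-array sources such as an endless range (a small check, assuming the module above has been loaded):
(1..Float::INFINITY).multi_split(3, 4, 2).to_a
#=> [[1, 2, 3], [4, 5, 6, 7], [8, 9]]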
Another option (lossless in the event the splits do not add up to the array size):
def split_at(arr, splits)
  rest = arr.last(arr.size - splits.reduce(:+))
  enum = arr.to_enum
  splits.map do |n|
    n.times.map { enum.next }
  end.concat(rest.empty? ? [] : [rest])
end
Then called as
split_at((1..9), [3, 4, 2])
#=> [[1, 2, 3], [4, 5, 6, 7], [8, 9]]
split_at((1..22), [3, 4, 2])
#=> [[1, 2, 3], [4, 5, 6, 7], [8, 9], [10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22]]
Example
class Array
  def split_by_number(*sizes)
    sizes.each_with_object([]) { |n,a| a << [a.empty? ? 0 : a.last.sum, n] }.
      map { |start, nbr| self[start, nbr] }
  end
end
[1, 2, 3, 4, 5, 6, 7, 8, 9].split_by_number 3, 4, 2
#=> [[1, 2, 3], [4, 5, 6, 7], [8, 9]]
Note that
[3, 4, 2].each_with_object([]) { |n,a| a << [a.empty? ? 0 : a.last.sum, n] }
#=> [[0, 3], [3, 4], [7, 2]]
I would like to find all the permutations of plucking 3, 4 or 5 numbers from [2,3,4,5,6,7,8], repeats allowed, such that their sum is 16. So [8,5,3], [8,3,5] and [4,3,3,3,3] are valid permutations. Also circular permutations should be removed so [3,3,3,3,4] wouldn't also be added to the answer.
I can do this in Ruby without allowing repeats like this:
d = [2,3,4,5,6,7,8]
number_of_divisions = [3,4,5]
number_of_divisions.collect do |n|
  d.permutation(n).to_a.reject do |p|
    p[0..n].inject(0) { |sum,x| sum + x } != 16
  end
end
How could I allow repeats so that [3,3,3,3,4] was included?
For all permutations, including duplicates, one might use Array#repeated_permutation:
d = [2,3,4,5,6,7,8]
number_of_divisions = [3,4,5]
number_of_divisions.flat_map do |n|
  d.repeated_permutation(n).reject do |p| # no need for `to_a` here
    p.inject(:+) != 16
  end
end
or, even better with Array#repeated_combination:
number_of_divisions.flat_map do |n|
  d.repeated_combination(n).reject do |p| # no need for `to_a` here
    p.inject(:+) != 16
  end
end
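To see why repeated_combination is preferable here, compare the sizes of the two enumerations for n = 5 (a quick count, not part of the original answers):
d = [2, 3, 4, 5, 6, 7, 8]
d.repeated_permutation(5).count #=> 16807
d.repeated_combination(5).count #=> 462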
There are far fewer repeated combinations than repeated permutations, so let's find the repeated combinations that sum to the given value, then permute each of those. Moreover, by applying uniq at each of several steps of the calculation we can significantly reduce the number of repeated combinations and permutations considered.
Code
require 'set'
def rep_perms_for_all(arr, n_arr, tot)
  n_arr.flat_map { |n| rep_perms_for_1(arr, n, tot) }
end

def rep_perms_for_1(arr, n, tot)
  rep_combs_to_rep_perms(rep_combs_for_1(arr, n, tot)).uniq
end

def rep_combs_for_1(arr, n, tot)
  arr.repeated_combination(n).uniq.select { |c| c.sum == tot }
end

def rep_combs_to_rep_perms(combs)
  combs.flat_map { |c| comb_to_perms(c) }.uniq
end

def comb_to_perms(comb)
  comb.permutation(comb.size).uniq.uniq do |p|
    p.size.times.with_object(Set.new) { |i,s| s << p.rotate(i) }
  end
end
Examples
rep_perms_for_all([2,3,4,5], [3], 12)
#=> [[2, 5, 5], [3, 4, 5], [3, 5, 4], [4, 4, 4]]
rep_perms_for_all([2,3,4,5,6,7,8], [3,4,5], 16).size
#=> 93
rep_perms_for_all([2,3,4,5,6,7,8], [3,4,5], 16)
#=> [[2, 6, 8], [2, 8, 6], [2, 7, 7], [3, 5, 8], [3, 8, 5], [3, 6, 7],
# [3, 7, 6], [4, 4, 8], [4, 5, 7], [4, 7, 5], [4, 6, 6], [5, 5, 6],
# [2, 2, 4, 8], [2, 2, 8, 4], [2, 4, 2, 8], [2, 2, 5, 7], [2, 2, 7, 5],
# [2, 5, 2, 7], [2, 2, 6, 6], [2, 6, 2, 6], [2, 3, 3, 8], [2, 3, 8, 3],
# ...
# [3, 3, 3, 7], [3, 3, 4, 6], [3, 3, 6, 4], [3, 4, 3, 6], [3, 3, 5, 5],
# [3, 5, 3, 5], [3, 4, 4, 5], [3, 4, 5, 4], [3, 5, 4, 4], [4, 4, 4, 4],
# ...
# [2, 2, 4, 5, 3], [2, 2, 5, 3, 4], [2, 2, 5, 4, 3], [2, 3, 2, 4, 5],
# [2, 3, 2, 5, 4], [2, 3, 4, 2, 5], [2, 3, 5, 2, 4], [2, 4, 2, 5, 3],
# ...
# [2, 5, 3, 3, 3], [2, 3, 3, 4, 4], [2, 3, 4, 3, 4], [2, 3, 4, 4, 3],
# [2, 4, 3, 3, 4], [2, 4, 3, 4, 3], [2, 4, 4, 3, 3], [3, 3, 3, 3, 4]]
Explanation
rep_combs_for_1 uses the method Enumerable#sum, which made its debut in Ruby v2.4. For earlier versions, use c.reduce(:+) == tot.
In comb_to_perms, the first uniq simply removes duplicates. The second uniq, with a block, keeps just one representative of each group of permutations that are rotations of one another: the block maps each permutation p to the set of its p.size rotations, so rotations compare equal. For example,
p = [1,2,3]
p.size.times.with_object(Set.new) { |i,s| s << p.rotate(i) }
#=> #<Set: {[1, 2, 3], [2, 3, 1], [3, 1, 2]}>
I have an array of arrays. I want to collect together the first elements of the arrays, then the second elements, then the third.
Example arrays:
a = [[4, 5, 6], [1, 2, 3], [8, 9, 10]]
a1 = [[1, 2, 3], [8, 9, 10]]
a2 = [[4, 5, 6], [1, 2, 3], [8, 9, 10], [11, 21, 31]]
Output:
out of a: [[4,1,8],[5,2,9],[6,3,10]]
out of a1: [[1,8],[2,9],[3,10]]
out of a2: [[4,1,8,11],[5,2,9,21],[6,3,10,31]]
Use transpose method
a.transpose
=> [[4, 1, 8], [5, 2, 9], [6, 3, 10]]
Array#transpose:
[a, a1, a2].map(&:transpose)
# [
# [[4, 1, 8], [5, 2, 9], [6, 3, 10]],
# [[1, 8], [2, 9], [3, 10]],
# [[4, 1, 8, 11], [5, 2, 9, 21], [6, 3, 10, 31]]
# ]
Whenever Array#transpose can be used so can Enumerable#zip.
a.first.zip(*a.drop(1))
#=> [[4,1,8],[5,2,9],[6,3,10]]
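One difference worth keeping in mind (an added note, not from the answers): transpose requires all rows to have the same length, whereas zip pads missing values with nil:
[[1, 2], [3]].transpose #=> IndexError (element size differs)
[1, 2].zip([3])         #=> [[1, 3], [2, nil]]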
Is there an efficient way to remove subsets from an array of sets?
E.g. array of arrays
[[2, 3, 4, 7, 8, 9, 10], [1, 5, 6], [3, 7, 10], [4, 8, 9], [5, 6], [7, 10], [8, 9], [6], [9]]
to output an array
[[2, 3, 4, 7, 8, 9, 10], [1, 5, 6]]
The key is guaranteeing the source sets are sorted in descending order of size. That way all supersets precede their subsets.
Here’s a generic function to do it. You could adapt it to take any kind of sequence of sequence of hashable and convert them to an array of sets on the way in:
func removeSubsets<T: Hashable>(source: [Set<T>]) -> [Set<T>] {
    let sets = source.sorted { $0.count > $1.count }
    var supersets: [Set<T>] = []
    for set in sets {
        if !contains(supersets, { set.isSubsetOf($0) }) {
            supersets.append(set)
        }
    }
    return supersets
}
removeSubsets([[2, 3, 4, 7, 8, 9, 10], [1, 5, 6], [3, 7, 10], [4, 8, 9], [5, 6], [7, 10], [8, 9], [6], [9]])
// returns [{10, 2, 9, 4, 7, 3, 8}, {5, 6, 1}]
It's still cubic unfortunately since contains is linear and so is isSubsetOf.
EDIT: here's the fully generic version:
func removeSubsets
    <S0: SequenceType, S1: SequenceType
     where S0.Generator.Element == S1,
           S1.Generator.Element: Hashable>
    (source: S0) -> [Set<S1.Generator.Element>]
{
    let sets = map(source) { Set($0) }.sorted { $0.count > $1.count }
    var supersets: [Set<S1.Generator.Element>] = []
    for set in sets {
        if !contains(supersets, { set.isSubsetOf($0) }) {
            supersets.append(set)
        }
    }
    return supersets
}
let a: [[Int]] = [
[2, 3, 4, 7, 8, 9, 10],
[1, 5, 6], [3, 7, 10],
[4, 8, 9], [5, 6],
[7, 10], [8, 9],
[6], [9]]
removeSubsets(a) // returns [{10, 2, 9, 4, 7, 3, 8}, {5, 6, 1}]
EDIT2: if you want the result to be an array of the original arrays (since converting them to sets loses their ordering), you could make the following change, which takes more space but is also slightly more efficient since it only converts the supersets to sets, not the subsets:
func removeSubsets<T: Hashable>(source: [[T]]) -> [[T]] {
    // note, this is quite efficient since arrays are copy-on-write,
    // so it is only really creating a new array of pointers
    let sets = source.sorted { $0.count > $1.count }
    var supersets: [Set<T>] = []
    var result: [[T]] = []
    for set in sets {
        if !contains(supersets, { $0.isSupersetOf(set) }) {
            supersets.append(Set(set))
            result.append(set)
        }
    }
    return result
}
removeSubsets([[2, 3, 4, 7, 8, 9, 10], [1, 5, 6], [3, 7, 10], [4, 8, 9], [5, 6], [7, 10], [8, 9], [6], [9]])
// returns [[2, 3, 4, 7, 8, 9, 10], [1, 5, 6]]
EDIT3: if you want to keep the original order of the sets (just with the subsets removed), you could tag them with a number on the way in before sorting, then re-sort them using it and strip it off the result at the end:
func removeSubsets<T: Hashable>(source: [[T]]) -> [[T]] {
    let sets = sorted(enumerate(source)) { $0.1.count > $1.1.count }
    var supersets: [Set<T>] = []
    var result: [(Int,[T])] = []
    for (n,set) in sets {
        if !contains(supersets, { $0.isSupersetOf(set) }) {
            supersets.append(Set(set))
            result.append((n, set))
        }
    }
    return result.sorted { $0.0 < $1.0 }.map { $1 }
}
// note, input not sorted in order of length
removeSubsets([[1, 5, 6], [2, 3, 4, 7, 8, 9, 10], [3, 7, 10], [4, 8, 9], [5, 6], [7, 10], [8, 9], [6], [9]])
// returns [[1, 5, 6], [2, 3, 4, 7, 8, 9, 10]]
Just like with any other (non-2D/set) array you could use an array extension like this ...
extension Array
{
    func slice(indices:Int...) -> Array
    {
        var s = indices[0];
        var e = self.count - 1;
        if (indices.count > 1)
        {
            e = indices[1];
        }
        if (e < 0)
        {
            e += self.count;
        }
        if (s < 0)
        {
            s += self.count;
        }
        let count = (s < e ? e - s : s - e) + 1;
        let inc = s < e ? 1 : -1;
        var result = Array();
        var idx = s;
        for i in 0 ..< count
        {
            result.append(self[idx]);
            idx += inc;
        }
        return result;
    }
}
Usage:
let a = [[2, 3, 4, 7, 8, 9, 10], [1, 5, 6], [3, 7, 10], [4, 8, 9], [5, 6], [7, 10], [8, 9], [6], [9]];
let b = a.slice(0, 1);
let c = a.slice(3);
If your arrays do not contain duplicated int values, you can convert them to Sets to use Swift's set operations:
(Take a look at Performing Set Operations)
https://developer.apple.com/library/prerelease/ios/documentation/Swift/Conceptual/Swift_Programming_Language/CollectionTypes.html
Here is my code to get another Array which does not contain subsets. This method is not optimized, but it works.
//let arrayOfArray = [[2, 3, 4, 7, 8, 9, 10], [1, 5, 6], [3, 7, 10], [4, 8, 9], [5, 6], [7, 10], [8, 9], [6], [9]]
//use set instead
var setArray : [Set<Int>] = [[2, 3, 4, 7, 8, 9, 10], [1, 5, 6], [3, 7, 10], [4, 8, 9], [5, 6], [7, 10], [8, 9], [6], [9]]
setArray.sort({$0.count > $1.count}) //sort to have ordered array (biggest set at first)
var result = [Set<Int>]() //you will get your result in this variable.
for _aSet in setArray {
    var isSubSet = false
    for _exitSet in result {
        if _aSet.isSubsetOf(_exitSet) {
            isSubSet = true
            break;
        }
    }
    if (!isSubSet) {
        result.append(_aSet)
    }
}
This is the most efficient way I could think of:
let nArrays = [[2, 3, 4, 7, 8, 9, 10], [1, 5, 6], [3, 7, 10], [4, 8, 9], [5, 6], [7, 10], [8, 9], [6], [9]]
nArrays
    .reduce([Set<Int>]()) {
        accu, el in let setEl = Set(el)
        return contains(accu) {setEl.isSubsetOf($0)} ? accu : accu + [setEl]
    }
//[{10, 2, 9, 4, 7, 3, 8}, {5, 6, 1}]
Instead of checking if every array is a subset of every other array, you just check if they're a subset of the already checked arrays. Of course, that returns an array of Sets, not an array of Arrays, but you can map() over it to convert it back:
let nArrays = [[2, 3, 4, 7, 8, 9, 10], [1, 5, 6], [3, 7, 10], [4, 8, 9], [5, 6], [7, 10], [8, 9], [6], [9]]
nArrays
    .reduce([Set<Int>]()) {
        accu, el in let setEl = Set(el)
        return contains(accu) {setEl.isSubsetOf($0)} ? accu : accu + [setEl]
    }
    .map{Array($0)}
//[[10, 2, 9, 4, 7, 3, 8], [5, 6, 1]]
You could do this:
let arrayOfArray = [[2, 3, 4, 7, 8, 9, 10], [1, 5, 6], [3, 7, 10], [4, 8, 9], [5, 6], [7, 10], [8, 9], [6], [9]]
let output = arrayOfArray[0...1]