diff --git a/NEWS.md b/NEWS.md
index 61e7bc790a5bfb..0ae8c4cb76b000 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -59,10 +59,6 @@ Note: We're only listing outstanding class updates.
## Stdlib updates
-The following default gem is added.
-
-* win32-registry 0.0.1
-
* Tempfile
* The keyword argument `anonymous: true` is implemented for Tempfile.create.
@@ -75,6 +71,10 @@ The following default gem is added.
* This library is now extracted from the Ruby repository to [ruby/net-http-sspi].
[[Feature #20775]]
+The following default gem is added.
+
+* win32-registry 0.0.1
+
The following default gems are updated.
* RubyGems 3.6.0.dev
@@ -94,10 +94,15 @@ The following default gems are updated.
* resolv 0.4.0
* stringio 3.1.2.dev
* strscan 3.1.1.dev
+* syntax_suggest 2.0.1
* time 0.4.0
* uri 0.13.1
* zlib 3.1.1
+The following bundled gem is added.
+
+* repl_type_completor 0.1.7
+
The following bundled gems are updated.
* minitest 5.25.1
@@ -129,10 +134,6 @@ The following bundled gems are promoted from default gems.
* syslog 0.1.2
* csv 3.3.0
-The following bundled gem is added.
-
-* repl_type_completor 0.1.7
-
See GitHub releases like [GitHub Releases of Logger] or changelog for
details of the default gems or bundled gems.
diff --git a/array.c b/array.c
index 015395f74caa72..52e103989baa05 100644
--- a/array.c
+++ b/array.c
@@ -3352,43 +3352,12 @@ sort_2(const void *ap, const void *bp, void *dummy)
/*
* call-seq:
- * array.sort! -> self
- * array.sort! {|a, b| ... } -> self
+ * sort! -> self
+ * sort! {|a, b| ... } -> self
*
- * Returns +self+ with its elements sorted in place.
- *
- * With no block, compares elements using operator #<=>
- * (see Comparable):
- *
- * a = 'abcde'.split('').shuffle
- * a # => ["e", "b", "d", "a", "c"]
- * a.sort!
- * a # => ["a", "b", "c", "d", "e"]
- *
- * With a block, calls the block with each element pair;
- * for each element pair +a+ and +b+, the block should return an integer:
- *
- * - Negative when +b+ is to follow +a+.
- * - Zero when +a+ and +b+ are equivalent.
- * - Positive when +a+ is to follow +b+.
- *
- * Example:
- *
- * a = 'abcde'.split('').shuffle
- * a # => ["e", "b", "d", "a", "c"]
- * a.sort! {|a, b| a <=> b }
- * a # => ["a", "b", "c", "d", "e"]
- * a.sort! {|a, b| b <=> a }
- * a # => ["e", "d", "c", "b", "a"]
- *
- * When the block returns zero, the order for +a+ and +b+ is indeterminate,
- * and may be unstable:
- *
- * a = 'abcde'.split('').shuffle
- * a # => ["e", "b", "d", "a", "c"]
- * a.sort! {|a, b| 0 }
- * a # => ["d", "e", "c", "a", "b"]
+ * Like Array#sort, but returns +self+ with its elements sorted in place.
*
+ * Related: see {Methods for Assigning}[rdoc-ref:Array@Methods+for+Assigning].
*/
VALUE
@@ -3455,21 +3424,18 @@ rb_ary_sort_bang(VALUE ary)
/*
* call-seq:
- * array.sort -> new_array
- * array.sort {|a, b| ... } -> new_array
+ * sort -> new_array
+ * sort {|a, b| ... } -> new_array
*
- * Returns a new +Array+ whose elements are those from +self+, sorted.
+ * Returns a new array containing the elements of +self+, sorted.
*
- * With no block, compares elements using operator #<=>
- * (see Comparable):
+ * With no block given, compares elements using operator #<=>
+ * (see Object#<=>):
*
- * a = 'abcde'.split('').shuffle
- * a # => ["e", "b", "d", "a", "c"]
- * a1 = a.sort
- * a1 # => ["a", "b", "c", "d", "e"]
+ * [0, 2, 3, 1].sort # => [0, 1, 2, 3]
*
- * With a block, calls the block with each element pair;
- * for each element pair +a+ and +b+, the block should return an integer:
+ * With a block given, calls the block with each combination of pairs of elements from +self+;
+ * for each pair +a+ and +b+, the block should return a numeric:
*
* - Negative when +b+ is to follow +a+.
* - Zero when +a+ and +b+ are equivalent.
@@ -3477,22 +3443,14 @@ rb_ary_sort_bang(VALUE ary)
*
* Example:
*
- * a = 'abcde'.split('').shuffle
- * a # => ["e", "b", "d", "a", "c"]
- * a1 = a.sort {|a, b| a <=> b }
- * a1 # => ["a", "b", "c", "d", "e"]
- * a2 = a.sort {|a, b| b <=> a }
- * a2 # => ["e", "d", "c", "b", "a"]
+ * a = [3, 2, 0, 1]
+ * a.sort {|a, b| a <=> b } # => [0, 1, 2, 3]
+ * a.sort {|a, b| b <=> a } # => [3, 2, 1, 0]
*
* When the block returns zero, the order for +a+ and +b+ is indeterminate,
- * and may be unstable:
+ * and may be unstable.
*
- * a = 'abcde'.split('').shuffle
- * a # => ["e", "b", "d", "a", "c"]
- * a1 = a.sort {|a, b| 0 }
- * a1 # => ["c", "e", "b", "d", "a"]
- *
- * Related: Enumerable#sort_by.
+ * Related: see {Methods for Fetching}[rdoc-ref:Array@Methods+for+Fetching].
*/
VALUE
@@ -4145,71 +4103,94 @@ ary_slice_bang_by_rb_ary_splice(VALUE ary, long pos, long len)
/*
* call-seq:
- * array.slice!(n) -> object or nil
- * array.slice!(start, length) -> new_array or nil
- * array.slice!(range) -> new_array or nil
+ * slice!(index) -> object or nil
+ * slice!(start, length) -> new_array or nil
+ * slice!(range) -> new_array or nil
*
* Removes and returns elements from +self+.
*
- * When the only argument is an Integer +n+,
- * removes and returns the _nth_ element in +self+:
+ * With numeric argument +index+ given,
+ * removes and returns the element at offset +index+:
*
- * a = [:foo, 'bar', 2]
- * a.slice!(1) # => "bar"
- * a # => [:foo, 2]
+ * a = ['a', 'b', 'c', 'd']
+ * a.slice!(2) # => "c"
+ * a # => ["a", "b", "d"]
+ * a.slice!(2.1) # => "d"
+ * a # => ["a", "b"]
*
- * If +n+ is negative, counts backwards from the end of +self+:
+ * If +index+ is negative, counts backwards from the end of +self+:
*
- * a = [:foo, 'bar', 2]
- * a.slice!(-1) # => 2
- * a # => [:foo, "bar"]
+ * a = ['a', 'b', 'c', 'd']
+ * a.slice!(-2) # => "c"
+ * a # => ["a", "b", "d"]
*
- * If +n+ is out of range, returns +nil+.
+ * If +index+ is out of range, returns +nil+.
*
- * When the only arguments are Integers +start+ and +length+,
- * removes +length+ elements from +self+ beginning at offset +start+;
- * returns the deleted objects in a new +Array+:
+ * With numeric arguments +start+ and +length+ given,
+ * removes +length+ elements from +self+ beginning at zero-based offset +start+;
+ * returns the removed objects in a new array:
*
- * a = [:foo, 'bar', 2]
- * a.slice!(0, 2) # => [:foo, "bar"]
- * a # => [2]
+ * a = ['a', 'b', 'c', 'd']
+ * a.slice!(1, 2) # => ["b", "c"]
+ * a # => ["a", "d"]
+ * a.slice!(0.1, 1.1) # => ["a"]
+ * a # => ["d"]
+ *
+ * If +start+ is negative, counts backwards from the end of +self+:
+ *
+ * a = ['a', 'b', 'c', 'd']
+ * a.slice!(-2, 1) # => ["c"]
+ * a # => ["a", "b", "d"]
+ *
+ * If +start+ is out of range, returns +nil+:
+ *
+ * a = ['a', 'b', 'c', 'd']
+ * a.slice!(5, 1) # => nil
+ * a.slice!(-5, 1) # => nil
*
* If start + length exceeds the array size,
* removes and returns all elements from offset +start+ to the end:
*
- * a = [:foo, 'bar', 2]
- * a.slice!(1, 50) # => ["bar", 2]
- * a # => [:foo]
+ * a = ['a', 'b', 'c', 'd']
+ * a.slice!(2, 50) # => ["c", "d"]
+ * a # => ["a", "b"]
*
* If start == a.size and +length+ is non-negative,
- * returns a new empty +Array+.
+ * returns a new empty array.
*
* If +length+ is negative, returns +nil+.
*
- * When the only argument is a Range object +range+,
- * treats range.min as +start+ above and range.size as +length+ above:
+ * With Range argument +range+ given,
+ * treats range.min as +start+ (as above)
+ * and range.size as +length+ (as above):
*
- * a = [:foo, 'bar', 2]
- * a.slice!(1..2) # => ["bar", 2]
- * a # => [:foo]
+ * a = ['a', 'b', 'c', 'd']
+ * a.slice!(1..2) # => ["b", "c"]
+ * a # => ["a", "d"]
*
- * If range.start == a.size, returns a new empty +Array+.
+ * If range.start == a.size, returns a new empty array:
*
- * If range.start is larger than the array size, returns +nil+.
+ * a = ['a', 'b', 'c', 'd']
+ * a.slice!(4..5) # => []
*
- * If range.end is negative, counts backwards from the end of the array:
+ * If range.start is larger than the array size, returns +nil+:
*
- * a = [:foo, 'bar', 2]
- * a.slice!(0..-2) # => [:foo, "bar"]
- * a # => [2]
+ *   a = ['a', 'b', 'c', 'd']
+ *   a.slice!(5..6) # => nil
*
* If range.start is negative,
- * calculates the start index backwards from the end of the array:
+ * calculates the start index by counting backwards from the end of +self+:
*
- * a = [:foo, 'bar', 2]
- * a.slice!(-2..2) # => ["bar", 2]
- * a # => [:foo]
+ * a = ['a', 'b', 'c', 'd']
+ * a.slice!(-2..2) # => ["c"]
+ *
+ * If range.end is negative,
+ * calculates the end index by counting backwards from the end of +self+:
*
+ * a = ['a', 'b', 'c', 'd']
+ * a.slice!(0..-2) # => ["a", "b", "c"]
+ *
+ * Related: see {Methods for Deleting}[rdoc-ref:Array@Methods+for+Deleting].
*/
static VALUE
diff --git a/array.rb b/array.rb
index 23d38ebc655f7e..f6c8af8f78360f 100644
--- a/array.rb
+++ b/array.rb
@@ -63,21 +63,30 @@ def each
#
# The object given with keyword argument +random+ is used as the random number generator.
#
- # Related: see {Methods for Fetching}[rdoc-ref:Array@Methods+for+Fetching].
+ # Related: see {Methods for Assigning}[rdoc-ref:Array@Methods+for+Assigning].
def shuffle!(random: Random)
Primitive.rb_ary_shuffle_bang(random)
end
# call-seq:
- # array.shuffle(random: Random) -> new_ary
+ # shuffle(random: Random) -> new_array
+ #
+ # Returns a new array containing all elements from +self+ in a random order,
+ # as selected by the object given by keyword argument +random+:
+ #
+ # a = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
+ # a.shuffle # => [0, 8, 1, 9, 6, 3, 4, 7, 2, 5]
+ # a.shuffle # => [8, 9, 0, 5, 1, 2, 6, 4, 7, 3]
+ #
+ # Duplicate elements are included:
#
- # Returns a new array with elements of +self+ shuffled.
- # a = [1, 2, 3] #=> [1, 2, 3]
- # a.shuffle #=> [2, 3, 1]
- # a #=> [1, 2, 3]
+ # a = [0, 1, 0, 1, 0, 1, 0, 1, 0, 1]
+ # a.shuffle # => [1, 0, 1, 1, 0, 0, 1, 0, 0, 1]
+ # a.shuffle # => [1, 1, 0, 0, 0, 1, 1, 0, 0, 1]
#
- # The optional +random+ argument will be used as the random number generator:
- # a.shuffle(random: Random.new(1)) #=> [1, 3, 2]
+ # The object given with keyword argument +random+ is used as the random number generator.
+ #
+ # Related: see {Methods for Fetching}[rdoc-ref:Array@Methods+for+Fetching].
def shuffle(random: Random)
Primitive.rb_ary_shuffle(random)
end
diff --git a/doc/maintainers.md b/doc/maintainers.md
index fa2c24140b4944..0bdfdc268355bb 100644
--- a/doc/maintainers.md
+++ b/doc/maintainers.md
@@ -199,7 +199,8 @@ have commit right, others don't.
#### lib/prism.rb
* Kevin Newton (kddnewton)
-* Jemma Issroff (jemmaissroff)
+* Eileen Uchitelle (eileencodes)
+* Aaron Patterson (tenderlove)
* https://github.com/ruby/prism
* https://rubygems.org/gems/prism
diff --git a/doc/yjit/yjit.md b/doc/yjit/yjit.md
index d57d5dceda43e5..49e395285f6992 100644
--- a/doc/yjit/yjit.md
+++ b/doc/yjit/yjit.md
@@ -166,6 +166,12 @@ You can dump statistics about compilation and execution by running YJIT with the
./miniruby --yjit-stats myscript.rb
```
+You can see what YJIT has compiled by running YJIT with the `--yjit-log` command-line option:
+
+```sh
+./miniruby --yjit-log myscript.rb
+```
+
The machine code generated for a given method can be printed by adding `puts RubyVM::YJIT.disasm(method(:method_name))` to a Ruby script. Note that no code will be generated if the method is not compiled.
Command-Line Options
@@ -181,6 +187,8 @@ YJIT supports all command-line options supported by upstream CRuby, but also add
compiled, lower values mean less code is compiled (default 200K)
- `--yjit-stats`: print statistics after the execution of a program (incurs a run-time cost)
- `--yjit-stats=quiet`: gather statistics while running a program but don't print them. Stats are accessible through `RubyVM::YJIT.runtime_stats`. (incurs a run-time cost)
+- `--yjit-log[=file|dir]`: log all compilation events to the specified file or directory. If no name is supplied, the last 1024 log entries will be printed to stderr when the application exits.
+- `--yjit-log=quiet`: gather a circular buffer of recent YJIT compilations. The compilation log entries are accessible through `RubyVM::YJIT.log` and old entries will be discarded if the buffer is not drained quickly. (incurs a run-time cost)
- `--yjit-disable`: disable YJIT despite other `--yjit*` flags for lazily enabling it with `RubyVM::YJIT.enable`
- `--yjit-code-gc`: enable code GC (disabled by default as of Ruby 3.3).
It will cause all machine code to be discarded when the executable memory size limit is hit, meaning JIT compilation will then start over.
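For illustration, a long-running process started with `--yjit-log=quiet` could drain the buffer itself. A minimal sketch, assuming only that `RubyVM::YJIT.log` returns the currently buffered entries as an array-like collection:

```ruby
# Run as: ruby --yjit --yjit-log=quiet app.rb
# Hedged sketch: the exact shape of each log entry is not specified here.
if defined?(RubyVM::YJIT) && RubyVM::YJIT.enabled?
  entries = RubyVM::YJIT.log
  warn "YJIT buffered #{entries.size} compilation log entries"
end
```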
diff --git a/ext/etc/etc.c b/ext/etc/etc.c
index fcbd1af1b5cc6c..6c08088710e26d 100644
--- a/ext/etc/etc.c
+++ b/ext/etc/etc.c
@@ -691,6 +691,16 @@ etc_getgrent(VALUE obj)
VALUE rb_w32_special_folder(int type);
UINT rb_w32_system_tmpdir(WCHAR *path, UINT len);
VALUE rb_w32_conv_from_wchar(const WCHAR *wstr, rb_encoding *enc);
+#elif defined(LOAD_RELATIVE)
+static inline VALUE
+rbconfig(void)
+{
+ VALUE config;
+ rb_require("rbconfig");
+ config = rb_const_get(rb_path2class("RbConfig"), rb_intern("CONFIG"));
+ Check_Type(config, T_HASH);
+ return config;
+}
#endif
/* call-seq:
@@ -710,6 +720,8 @@ etc_sysconfdir(VALUE obj)
{
#ifdef _WIN32
return rb_w32_special_folder(CSIDL_COMMON_APPDATA);
+#elif defined(LOAD_RELATIVE)
+ return rb_hash_aref(rbconfig(), rb_str_new_lit("sysconfdir"));
#else
return rb_filesystem_str_new_cstr(SYSCONFDIR);
#endif
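For context, the API affected by this new branch is `Etc.sysconfdir`. A minimal usage sketch (the returned path depends on how Ruby was configured and installed):

```ruby
require "etc"

# On a LOAD_RELATIVE (relocatable) build the value now comes from
# RbConfig::CONFIG["sysconfdir"]; otherwise the compiled-in SYSCONFDIR is used.
puts Etc.sysconfdir   # e.g. "/usr/local/etc"
```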
diff --git a/ext/json/fbuffer/fbuffer.h b/ext/json/fbuffer/fbuffer.h
index 1fb9d667003452..76bd6ce1296f0b 100644
--- a/ext/json/fbuffer/fbuffer.h
+++ b/ext/json/fbuffer/fbuffer.h
@@ -1,43 +1,8 @@
-
#ifndef _FBUFFER_H_
#define _FBUFFER_H_
#include "ruby.h"
-
-#ifndef RHASH_SIZE
-#define RHASH_SIZE(hsh) (RHASH(hsh)->tbl->num_entries)
-#endif
-
-#ifndef RFLOAT_VALUE
-#define RFLOAT_VALUE(val) (RFLOAT(val)->value)
-#endif
-
-#ifndef RARRAY_LEN
-#define RARRAY_LEN(ARRAY) RARRAY(ARRAY)->len
-#endif
-#ifndef RSTRING_PTR
-#define RSTRING_PTR(string) RSTRING(string)->ptr
-#endif
-#ifndef RSTRING_LEN
-#define RSTRING_LEN(string) RSTRING(string)->len
-#endif
-
-#ifdef PRIsVALUE
-# define RB_OBJ_CLASSNAME(obj) rb_obj_class(obj)
-# define RB_OBJ_STRING(obj) (obj)
-#else
-# define PRIsVALUE "s"
-# define RB_OBJ_CLASSNAME(obj) rb_obj_classname(obj)
-# define RB_OBJ_STRING(obj) StringValueCStr(obj)
-#endif
-
#include "ruby/encoding.h"
-#define FORCE_UTF8(obj) rb_enc_associate((obj), rb_utf8_encoding())
-
-/* We don't need to guard objects for rbx, so let's do nothing at all. */
-#ifndef RB_GC_GUARD
-#define RB_GC_GUARD(object)
-#endif
typedef struct FBufferStruct {
unsigned long initial_length;
@@ -55,14 +20,15 @@ typedef struct FBufferStruct {
static FBuffer *fbuffer_alloc(unsigned long initial_length);
static void fbuffer_free(FBuffer *fb);
+#ifndef JSON_GENERATOR
static void fbuffer_clear(FBuffer *fb);
+#endif
static void fbuffer_append(FBuffer *fb, const char *newstr, unsigned long len);
#ifdef JSON_GENERATOR
static void fbuffer_append_long(FBuffer *fb, long number);
#endif
static void fbuffer_append_char(FBuffer *fb, char newchr);
#ifdef JSON_GENERATOR
-static FBuffer *fbuffer_dup(FBuffer *fb);
static VALUE fbuffer_to_s(FBuffer *fb);
#endif
@@ -86,10 +52,12 @@ static void fbuffer_free(FBuffer *fb)
ruby_xfree(fb);
}
+#ifndef JSON_GENERATOR
static void fbuffer_clear(FBuffer *fb)
{
fb->len = 0;
}
+#endif
static inline void fbuffer_inc_capa(FBuffer *fb, unsigned long requested)
{
@@ -168,21 +136,10 @@ static void fbuffer_append_long(FBuffer *fb, long number)
fbuffer_append(fb, buf, len);
}
-static FBuffer *fbuffer_dup(FBuffer *fb)
-{
- unsigned long len = fb->len;
- FBuffer *result;
-
- result = fbuffer_alloc(len);
- fbuffer_append(result, FBUFFER_PAIR(fb));
- return result;
-}
-
static VALUE fbuffer_to_s(FBuffer *fb)
{
- VALUE result = rb_str_new(FBUFFER_PTR(fb), FBUFFER_LEN(fb));
+ VALUE result = rb_utf8_str_new(FBUFFER_PTR(fb), FBUFFER_LEN(fb));
fbuffer_free(fb);
- FORCE_UTF8(result);
return result;
}
#endif
diff --git a/ext/json/generator/generator.c b/ext/json/generator/generator.c
index 50fcb77bdfccc4..815fd973d79f31 100644
--- a/ext/json/generator/generator.c
+++ b/ext/json/generator/generator.c
@@ -1,22 +1,13 @@
#include "../fbuffer/fbuffer.h"
#include "generator.h"
-static VALUE mJSON, mExt, mGenerator, cState, mGeneratorMethods, mObject,
- mHash, mArray,
-#ifdef RUBY_INTEGER_UNIFICATION
- mInteger,
-#else
- mFixnum, mBignum,
+#ifndef RB_UNLIKELY
+#define RB_UNLIKELY(cond) (cond)
#endif
- mFloat, mString, mString_Extend,
- mTrueClass, mFalseClass, mNilClass, eGeneratorError,
- eNestingError;
-static ID i_to_s, i_to_json, i_new, i_indent, i_space, i_space_before,
- i_object_nl, i_array_nl, i_max_nesting, i_allow_nan, i_ascii_only,
- i_pack, i_unpack, i_create_id, i_extend, i_key_p,
- i_aref, i_send, i_respond_to_p, i_match, i_keys, i_depth,
- i_buffer_initial_length, i_dup, i_script_safe, i_escape_slash, i_strict;
+static VALUE mJSON, cState, mString_Extend, eGeneratorError, eNestingError;
+
+static ID i_to_s, i_to_json, i_new, i_pack, i_unpack, i_create_id, i_extend;
/* Converts in_string to a JSON string (without the wrapping '"'
* characters) in FBuffer out_buffer.
@@ -34,14 +25,13 @@ static ID i_to_s, i_to_json, i_new, i_indent, i_space, i_space_before,
* Everything else (should be UTF-8) is just passed through and
* appended to the result.
*/
-static void convert_UTF8_to_JSON(FBuffer *out_buffer, VALUE in_string, bool out_ascii_only, bool out_script_safe)
+static void convert_UTF8_to_JSON(FBuffer *out_buffer, VALUE in_string, bool out_script_safe)
{
const char *hexdig = "0123456789abcdef";
char scratch[12] = { '\\', 'u', 0, 0, 0, 0, '\\', 'u' };
const char *in_utf8_str = RSTRING_PTR(in_string);
unsigned long in_utf8_len = RSTRING_LEN(in_string);
- bool in_is_ascii_only = rb_enc_str_asciionly_p(in_string);
unsigned long beg = 0, pos;
@@ -51,30 +41,178 @@ static void convert_UTF8_to_JSON(FBuffer *out_buffer, VALUE in_string, bool out_
bool should_escape;
/* UTF-8 decoding */
- if (in_is_ascii_only) {
- ch = in_utf8_str[pos];
- ch_len = 1;
- } else {
- short i;
- if ((in_utf8_str[pos] & 0x80) == 0x00) { ch_len = 1; ch = in_utf8_str[pos]; } /* leading 1 bit is 0b0 */
- else if ((in_utf8_str[pos] & 0xE0) == 0xC0) { ch_len = 2; ch = in_utf8_str[pos] & 0x1F; } /* leading 3 bits are 0b110 */
- else if ((in_utf8_str[pos] & 0xF0) == 0xE0) { ch_len = 3; ch = in_utf8_str[pos] & 0x0F; } /* leading 4 bits are 0b1110 */
- else if ((in_utf8_str[pos] & 0xF8) == 0xF0) { ch_len = 4; ch = in_utf8_str[pos] & 0x07; } /* leading 5 bits are 0b11110 */
- else
- rb_raise(rb_path2class("JSON::GeneratorError"),
- "source sequence is illegal/malformed utf-8");
- if ((pos+ch_len) > in_utf8_len)
- rb_raise(rb_path2class("JSON::GeneratorError"),
- "partial character in source, but hit end");
- for (i = 1; i < ch_len; i++) {
- if ((in_utf8_str[pos+i] & 0xC0) != 0x80) /* leading 2 bits should be 0b10 */
- rb_raise(rb_path2class("JSON::GeneratorError"),
- "source sequence is illegal/malformed utf-8");
- ch = (ch<<6) | (in_utf8_str[pos+i] & 0x3F);
+ short i;
+ if ((in_utf8_str[pos] & 0x80) == 0x00) { ch_len = 1; ch = in_utf8_str[pos]; } /* leading 1 bit is 0b0 */
+ else if ((in_utf8_str[pos] & 0xE0) == 0xC0) { ch_len = 2; ch = in_utf8_str[pos] & 0x1F; } /* leading 3 bits are 0b110 */
+ else if ((in_utf8_str[pos] & 0xF0) == 0xE0) { ch_len = 3; ch = in_utf8_str[pos] & 0x0F; } /* leading 4 bits are 0b1110 */
+ else if ((in_utf8_str[pos] & 0xF8) == 0xF0) { ch_len = 4; ch = in_utf8_str[pos] & 0x07; } /* leading 5 bits are 0b11110 */
+ else {
+ rb_raise(rb_path2class("JSON::GeneratorError"), "source sequence is illegal/malformed utf-8");
+ }
+
+ for (i = 1; i < ch_len; i++) {
+ ch = (ch<<6) | (in_utf8_str[pos+i] & 0x3F);
+ }
+
+ /* JSON policy */
+ should_escape =
+ (ch < 0x20) ||
+ (ch == '"') ||
+ (ch == '\\') ||
+ (out_script_safe && (ch == '/')) ||
+ (out_script_safe && (ch == 0x2028)) ||
+ (out_script_safe && (ch == 0x2029));
+
+ /* JSON encoding */
+ if (should_escape) {
+ if (pos > beg) {
+ fbuffer_append(out_buffer, &in_utf8_str[beg], pos - beg);
+ }
+
+ beg = pos + ch_len;
+ switch (ch) {
+ case '"': fbuffer_append(out_buffer, "\\\"", 2); break;
+ case '\\': fbuffer_append(out_buffer, "\\\\", 2); break;
+ case '/': fbuffer_append(out_buffer, "\\/", 2); break;
+ case '\b': fbuffer_append(out_buffer, "\\b", 2); break;
+ case '\f': fbuffer_append(out_buffer, "\\f", 2); break;
+ case '\n': fbuffer_append(out_buffer, "\\n", 2); break;
+ case '\r': fbuffer_append(out_buffer, "\\r", 2); break;
+ case '\t': fbuffer_append(out_buffer, "\\t", 2); break;
+ default:
+ if (ch <= 0xFFFF) {
+ scratch[2] = hexdig[ch >> 12];
+ scratch[3] = hexdig[(ch >> 8) & 0xf];
+ scratch[4] = hexdig[(ch >> 4) & 0xf];
+ scratch[5] = hexdig[ch & 0xf];
+ fbuffer_append(out_buffer, scratch, 6);
+ } else {
+ uint16_t hi, lo;
+ ch -= 0x10000;
+ hi = 0xD800 + (uint16_t)(ch >> 10);
+ lo = 0xDC00 + (uint16_t)(ch & 0x3FF);
+
+ scratch[2] = hexdig[hi >> 12];
+ scratch[3] = hexdig[(hi >> 8) & 0xf];
+ scratch[4] = hexdig[(hi >> 4) & 0xf];
+ scratch[5] = hexdig[hi & 0xf];
+
+ scratch[8] = hexdig[lo >> 12];
+ scratch[9] = hexdig[(lo >> 8) & 0xf];
+ scratch[10] = hexdig[(lo >> 4) & 0xf];
+ scratch[11] = hexdig[lo & 0xf];
+
+ fbuffer_append(out_buffer, scratch, 12);
+ }
}
- if (ch > 0x10FFFF)
- rb_raise(rb_path2class("JSON::GeneratorError"),
- "source sequence is illegal/malformed utf-8");
+ }
+
+ pos += ch_len;
+ }
+
+ if (beg < in_utf8_len) {
+ fbuffer_append(out_buffer, &in_utf8_str[beg], in_utf8_len - beg);
+ }
+
+ RB_GC_GUARD(in_string);
+}
+
+static const bool escape_table[256] = {
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* '"' */
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, /* '\\' */
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1
+};
+
+static const bool script_safe_escape_table[256] = {
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, /* '"' and '/' */
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, /* '\\' */
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1
+};
+
+static void convert_ASCII_to_JSON(FBuffer *out_buffer, VALUE str, const bool escape_table[256])
+{
+ const char *hexdig = "0123456789abcdef";
+ char scratch[12] = { '\\', 'u', 0, 0, 0, 0, '\\', 'u' };
+
+ const char *ptr = RSTRING_PTR(str);
+ unsigned long len = RSTRING_LEN(str);
+
+ unsigned long beg = 0, pos;
+
+ for (pos = 0; pos < len;) {
+ unsigned char ch = ptr[pos];
+ /* JSON encoding */
+ if (escape_table[ch]) {
+ if (pos > beg) {
+ fbuffer_append(out_buffer, &ptr[beg], pos - beg);
+ }
+
+ beg = pos + 1;
+ switch (ch) {
+ case '"': fbuffer_append(out_buffer, "\\\"", 2); break;
+ case '\\': fbuffer_append(out_buffer, "\\\\", 2); break;
+ case '/': fbuffer_append(out_buffer, "\\/", 2); break;
+ case '\b': fbuffer_append(out_buffer, "\\b", 2); break;
+ case '\f': fbuffer_append(out_buffer, "\\f", 2); break;
+ case '\n': fbuffer_append(out_buffer, "\\n", 2); break;
+ case '\r': fbuffer_append(out_buffer, "\\r", 2); break;
+ case '\t': fbuffer_append(out_buffer, "\\t", 2); break;
+ default:
+ scratch[2] = hexdig[ch >> 12];
+ scratch[3] = hexdig[(ch >> 8) & 0xf];
+ scratch[4] = hexdig[(ch >> 4) & 0xf];
+ scratch[5] = hexdig[ch & 0xf];
+ fbuffer_append(out_buffer, scratch, 6);
+ }
+ }
+
+ pos++;
+ }
+
+ if (beg < len) {
+ fbuffer_append(out_buffer, &ptr[beg], len - beg);
+ }
+
+ RB_GC_GUARD(str);
+}
+
+static void convert_UTF8_to_ASCII_only_JSON(FBuffer *out_buffer, VALUE in_string, bool out_script_safe)
+{
+ const char *hexdig = "0123456789abcdef";
+ char scratch[12] = { '\\', 'u', 0, 0, 0, 0, '\\', 'u' };
+
+ const char *in_utf8_str = RSTRING_PTR(in_string);
+ unsigned long in_utf8_len = RSTRING_LEN(in_string);
+
+ unsigned long beg = 0, pos;
+
+ for (pos = 0; pos < in_utf8_len;) {
+ uint32_t ch;
+ short ch_len;
+ bool should_escape;
+
+ /* UTF-8 decoding */
+ short i;
+ if ((in_utf8_str[pos] & 0x80) == 0x00) { ch_len = 1; ch = in_utf8_str[pos]; } /* leading 1 bit is 0b0 */
+ else if ((in_utf8_str[pos] & 0xE0) == 0xC0) { ch_len = 2; ch = in_utf8_str[pos] & 0x1F; } /* leading 3 bits are 0b110 */
+ else if ((in_utf8_str[pos] & 0xF0) == 0xE0) { ch_len = 3; ch = in_utf8_str[pos] & 0x0F; } /* leading 4 bits are 0b1110 */
+ else if ((in_utf8_str[pos] & 0xF8) == 0xF0) { ch_len = 4; ch = in_utf8_str[pos] & 0x07; } /* leading 5 bits are 0b11110 */
+ else {
+ rb_raise(rb_path2class("JSON::GeneratorError"), "source sequence is illegal/malformed utf-8");
+ }
+
+ for (i = 1; i < ch_len; i++) {
+ ch = (ch<<6) | (in_utf8_str[pos+i] & 0x3F);
}
/* JSON policy */
@@ -82,15 +220,17 @@ static void convert_UTF8_to_JSON(FBuffer *out_buffer, VALUE in_string, bool out_
(ch < 0x20) ||
(ch == '"') ||
(ch == '\\') ||
- (out_ascii_only && (ch > 0x7F)) ||
+ (ch > 0x7F) ||
(out_script_safe && (ch == '/')) ||
(out_script_safe && (ch == 0x2028)) ||
(out_script_safe && (ch == 0x2029));
/* JSON encoding */
if (should_escape) {
- if (pos > beg)
+ if (pos > beg) {
fbuffer_append(out_buffer, &in_utf8_str[beg], pos - beg);
+ }
+
beg = pos + ch_len;
switch (ch) {
case '"': fbuffer_append(out_buffer, "\\\"", 2); break;
@@ -131,8 +271,11 @@ static void convert_UTF8_to_JSON(FBuffer *out_buffer, VALUE in_string, bool out_
pos += ch_len;
}
- if (beg < in_utf8_len)
+
+ if (beg < in_utf8_len) {
fbuffer_append(out_buffer, &in_utf8_str[beg], in_utf8_len - beg);
+ }
+
RB_GC_GUARD(in_string);
}
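The observable behavior of these three string-conversion paths can be sketched from Ruby through the existing `ascii_only` and `script_safe` generator options (illustrative results, not output captured from this patch):

```ruby
require "json"

JSON.generate("caf\u00e9")                    # => "\"café\""          valid UTF-8 passes through
JSON.generate("caf\u00e9", ascii_only: true)  # => "\"caf\\u00e9\""    non-ASCII escaped as \uXXXX
JSON.generate("</script>", script_safe: true) # => "\"<\\/script>\""   '/' escaped in script-safe mode
JSON.generate("\u{1F600}", ascii_only: true)  # => "\"\\ud83d\\ude00\"" astral-plane char as a surrogate pair
```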
@@ -417,9 +560,6 @@ static void State_free(void *ptr)
if (state->space_before) ruby_xfree(state->space_before);
if (state->object_nl) ruby_xfree(state->object_nl);
if (state->array_nl) ruby_xfree(state->array_nl);
- if (state->array_delim) fbuffer_free(state->array_delim);
- if (state->object_delim) fbuffer_free(state->object_delim);
- if (state->object_delim2) fbuffer_free(state->object_delim2);
ruby_xfree(state);
}
@@ -432,9 +572,6 @@ static size_t State_memsize(const void *ptr)
if (state->space_before) size += state->space_before_len + 1;
if (state->object_nl) size += state->object_nl_len + 1;
if (state->array_nl) size += state->array_nl_len + 1;
- if (state->array_delim) size += FBUFFER_CAPA(state->array_delim);
- if (state->object_delim) size += FBUFFER_CAPA(state->object_delim);
- if (state->object_delim2) size += FBUFFER_CAPA(state->object_delim2);
return size;
}
@@ -453,180 +590,10 @@ static const rb_data_type_t JSON_Generator_State_type = {
static VALUE cState_s_allocate(VALUE klass)
{
JSON_Generator_State *state;
- return TypedData_Make_Struct(klass, JSON_Generator_State,
- &JSON_Generator_State_type, state);
-}
-
-/*
- * call-seq: configure(opts)
- *
- * Configure this State instance with the Hash _opts_, and return
- * itself.
- */
-static VALUE cState_configure(VALUE self, VALUE opts)
-{
- VALUE tmp;
- GET_STATE(self);
- tmp = rb_check_convert_type(opts, T_HASH, "Hash", "to_hash");
- if (NIL_P(tmp)) tmp = rb_convert_type(opts, T_HASH, "Hash", "to_h");
- opts = tmp;
- tmp = rb_hash_aref(opts, ID2SYM(i_indent));
- if (RTEST(tmp)) {
- unsigned long len;
- Check_Type(tmp, T_STRING);
- len = RSTRING_LEN(tmp);
- state->indent = fstrndup(RSTRING_PTR(tmp), len + 1);
- state->indent_len = len;
- }
- tmp = rb_hash_aref(opts, ID2SYM(i_space));
- if (RTEST(tmp)) {
- unsigned long len;
- Check_Type(tmp, T_STRING);
- len = RSTRING_LEN(tmp);
- state->space = fstrndup(RSTRING_PTR(tmp), len + 1);
- state->space_len = len;
- }
- tmp = rb_hash_aref(opts, ID2SYM(i_space_before));
- if (RTEST(tmp)) {
- unsigned long len;
- Check_Type(tmp, T_STRING);
- len = RSTRING_LEN(tmp);
- state->space_before = fstrndup(RSTRING_PTR(tmp), len + 1);
- state->space_before_len = len;
- }
- tmp = rb_hash_aref(opts, ID2SYM(i_array_nl));
- if (RTEST(tmp)) {
- unsigned long len;
- Check_Type(tmp, T_STRING);
- len = RSTRING_LEN(tmp);
- state->array_nl = fstrndup(RSTRING_PTR(tmp), len + 1);
- state->array_nl_len = len;
- }
- tmp = rb_hash_aref(opts, ID2SYM(i_object_nl));
- if (RTEST(tmp)) {
- unsigned long len;
- Check_Type(tmp, T_STRING);
- len = RSTRING_LEN(tmp);
- state->object_nl = fstrndup(RSTRING_PTR(tmp), len + 1);
- state->object_nl_len = len;
- }
- tmp = ID2SYM(i_max_nesting);
+ VALUE obj = TypedData_Make_Struct(klass, JSON_Generator_State, &JSON_Generator_State_type, state);
state->max_nesting = 100;
- if (option_given_p(opts, tmp)) {
- VALUE max_nesting = rb_hash_aref(opts, tmp);
- if (RTEST(max_nesting)) {
- Check_Type(max_nesting, T_FIXNUM);
- state->max_nesting = FIX2LONG(max_nesting);
- } else {
- state->max_nesting = 0;
- }
- }
- tmp = ID2SYM(i_depth);
- state->depth = 0;
- if (option_given_p(opts, tmp)) {
- VALUE depth = rb_hash_aref(opts, tmp);
- if (RTEST(depth)) {
- Check_Type(depth, T_FIXNUM);
- state->depth = FIX2LONG(depth);
- } else {
- state->depth = 0;
- }
- }
- tmp = ID2SYM(i_buffer_initial_length);
- if (option_given_p(opts, tmp)) {
- VALUE buffer_initial_length = rb_hash_aref(opts, tmp);
- if (RTEST(buffer_initial_length)) {
- long initial_length;
- Check_Type(buffer_initial_length, T_FIXNUM);
- initial_length = FIX2LONG(buffer_initial_length);
- if (initial_length > 0) state->buffer_initial_length = initial_length;
- }
- }
- tmp = rb_hash_aref(opts, ID2SYM(i_allow_nan));
- state->allow_nan = RTEST(tmp);
- tmp = rb_hash_aref(opts, ID2SYM(i_ascii_only));
- state->ascii_only = RTEST(tmp);
- tmp = rb_hash_aref(opts, ID2SYM(i_script_safe));
- state->script_safe = RTEST(tmp);
- if (!state->script_safe) {
- tmp = rb_hash_aref(opts, ID2SYM(i_escape_slash));
- state->script_safe = RTEST(tmp);
- }
- tmp = rb_hash_aref(opts, ID2SYM(i_strict));
- state->strict = RTEST(tmp);
- return self;
-}
-
-static void set_state_ivars(VALUE hash, VALUE state)
-{
- VALUE ivars = rb_obj_instance_variables(state);
- int i = 0;
- for (i = 0; i < RARRAY_LEN(ivars); i++) {
- VALUE key = rb_funcall(rb_ary_entry(ivars, i), i_to_s, 0);
- long key_len = RSTRING_LEN(key);
- VALUE value = rb_iv_get(state, StringValueCStr(key));
- rb_hash_aset(hash, rb_str_intern(rb_str_substr(key, 1, key_len - 1)), value);
- }
-}
-
-/*
- * call-seq: to_h
- *
- * Returns the configuration instance variables as a hash, that can be
- * passed to the configure method.
- */
-static VALUE cState_to_h(VALUE self)
-{
- VALUE result = rb_hash_new();
- GET_STATE(self);
- set_state_ivars(result, self);
- rb_hash_aset(result, ID2SYM(i_indent), rb_str_new(state->indent, state->indent_len));
- rb_hash_aset(result, ID2SYM(i_space), rb_str_new(state->space, state->space_len));
- rb_hash_aset(result, ID2SYM(i_space_before), rb_str_new(state->space_before, state->space_before_len));
- rb_hash_aset(result, ID2SYM(i_object_nl), rb_str_new(state->object_nl, state->object_nl_len));
- rb_hash_aset(result, ID2SYM(i_array_nl), rb_str_new(state->array_nl, state->array_nl_len));
- rb_hash_aset(result, ID2SYM(i_allow_nan), state->allow_nan ? Qtrue : Qfalse);
- rb_hash_aset(result, ID2SYM(i_ascii_only), state->ascii_only ? Qtrue : Qfalse);
- rb_hash_aset(result, ID2SYM(i_max_nesting), LONG2FIX(state->max_nesting));
- rb_hash_aset(result, ID2SYM(i_script_safe), state->script_safe ? Qtrue : Qfalse);
- rb_hash_aset(result, ID2SYM(i_strict), state->strict ? Qtrue : Qfalse);
- rb_hash_aset(result, ID2SYM(i_depth), LONG2FIX(state->depth));
- rb_hash_aset(result, ID2SYM(i_buffer_initial_length), LONG2FIX(state->buffer_initial_length));
- return result;
-}
-
-/*
-* call-seq: [](name)
-*
-* Returns the value returned by method +name+.
-*/
-static VALUE cState_aref(VALUE self, VALUE name)
-{
- name = rb_funcall(name, i_to_s, 0);
- if (RTEST(rb_funcall(self, i_respond_to_p, 1, name))) {
- return rb_funcall(self, i_send, 1, name);
- } else {
- return rb_attr_get(self, rb_intern_str(rb_str_concat(rb_str_new2("@"), name)));
- }
-}
-
-/*
-* call-seq: []=(name, value)
-*
-* Sets the attribute name to value.
-*/
-static VALUE cState_aset(VALUE self, VALUE name, VALUE value)
-{
- VALUE name_writer;
-
- name = rb_funcall(name, i_to_s, 0);
- name_writer = rb_str_cat2(rb_str_dup(name), "=");
- if (RTEST(rb_funcall(self, i_respond_to_p, 1, name_writer))) {
- return rb_funcall(self, i_send, 2, name_writer, value);
- } else {
- rb_ivar_set(self, rb_intern_str(rb_str_concat(rb_str_new2("@"), name)), value);
- }
- return Qnil;
+ state->buffer_initial_length = FBUFFER_INITIAL_LENGTH_DEFAULT;
+ return obj;
}
struct hash_foreach_arg {
@@ -644,24 +611,16 @@ json_object_i(VALUE key, VALUE val, VALUE _arg)
JSON_Generator_State *state = arg->state;
VALUE Vstate = arg->Vstate;
- char *object_nl = state->object_nl;
- long object_nl_len = state->object_nl_len;
- char *indent = state->indent;
- long indent_len = state->indent_len;
- char *delim = FBUFFER_PTR(state->object_delim);
- long delim_len = FBUFFER_LEN(state->object_delim);
- char *delim2 = FBUFFER_PTR(state->object_delim2);
- long delim2_len = FBUFFER_LEN(state->object_delim2);
long depth = state->depth;
int j;
- if (arg->iter > 0) fbuffer_append(buffer, delim, delim_len);
- if (object_nl) {
- fbuffer_append(buffer, object_nl, object_nl_len);
+ if (arg->iter > 0) fbuffer_append_char(buffer, ',');
+ if (RB_UNLIKELY(state->object_nl)) {
+ fbuffer_append(buffer, state->object_nl, state->object_nl_len);
}
- if (indent) {
+ if (RB_UNLIKELY(state->indent)) {
for (j = 0; j < depth; j++) {
- fbuffer_append(buffer, indent, indent_len);
+ fbuffer_append(buffer, state->indent, state->indent_len);
}
}
@@ -678,8 +637,10 @@ json_object_i(VALUE key, VALUE val, VALUE _arg)
break;
}
- generate_json(buffer, Vstate, state, key_to_s);
- fbuffer_append(buffer, delim2, delim2_len);
+ generate_json_string(buffer, Vstate, state, key_to_s);
+ if (RB_UNLIKELY(state->space_before)) fbuffer_append(buffer, state->space_before, state->space_before_len);
+ fbuffer_append_char(buffer, ':');
+ if (RB_UNLIKELY(state->space)) fbuffer_append(buffer, state->space, state->space_len);
generate_json(buffer, Vstate, state, val);
arg->iter++;
@@ -688,10 +649,6 @@ json_object_i(VALUE key, VALUE val, VALUE _arg)
static void generate_json_object(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
{
- char *object_nl = state->object_nl;
- long object_nl_len = state->object_nl_len;
- char *indent = state->indent;
- long indent_len = state->indent_len;
long max_nesting = state->max_nesting;
long depth = ++state->depth;
int j;
@@ -709,11 +666,11 @@ static void generate_json_object(FBuffer *buffer, VALUE Vstate, JSON_Generator_S
rb_hash_foreach(obj, json_object_i, (VALUE)&arg);
depth = --state->depth;
- if (object_nl) {
- fbuffer_append(buffer, object_nl, object_nl_len);
- if (indent) {
+ if (RB_UNLIKELY(state->object_nl)) {
+ fbuffer_append(buffer, state->object_nl, state->object_nl_len);
+ if (RB_UNLIKELY(state->indent)) {
for (j = 0; j < depth; j++) {
- fbuffer_append(buffer, indent, indent_len);
+ fbuffer_append(buffer, state->indent, state->indent_len);
}
}
}
@@ -722,55 +679,70 @@ static void generate_json_object(FBuffer *buffer, VALUE Vstate, JSON_Generator_S
static void generate_json_array(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
{
- char *array_nl = state->array_nl;
- long array_nl_len = state->array_nl_len;
- char *indent = state->indent;
- long indent_len = state->indent_len;
long max_nesting = state->max_nesting;
- char *delim = FBUFFER_PTR(state->array_delim);
- long delim_len = FBUFFER_LEN(state->array_delim);
long depth = ++state->depth;
int i, j;
if (max_nesting != 0 && depth > max_nesting) {
rb_raise(eNestingError, "nesting of %ld is too deep", --state->depth);
}
fbuffer_append_char(buffer, '[');
- if (array_nl) fbuffer_append(buffer, array_nl, array_nl_len);
+ if (RB_UNLIKELY(state->array_nl)) fbuffer_append(buffer, state->array_nl, state->array_nl_len);
for(i = 0; i < RARRAY_LEN(obj); i++) {
- if (i > 0) fbuffer_append(buffer, delim, delim_len);
- if (indent) {
+ if (i > 0) {
+ fbuffer_append_char(buffer, ',');
+ if (RB_UNLIKELY(state->array_nl)) fbuffer_append(buffer, state->array_nl, state->array_nl_len);
+ }
+ if (RB_UNLIKELY(state->indent)) {
for (j = 0; j < depth; j++) {
- fbuffer_append(buffer, indent, indent_len);
+ fbuffer_append(buffer, state->indent, state->indent_len);
}
}
- generate_json(buffer, Vstate, state, rb_ary_entry(obj, i));
+ generate_json(buffer, Vstate, state, RARRAY_AREF(obj, i));
}
state->depth = --depth;
- if (array_nl) {
- fbuffer_append(buffer, array_nl, array_nl_len);
- if (indent) {
+ if (RB_UNLIKELY(state->array_nl)) {
+ fbuffer_append(buffer, state->array_nl, state->array_nl_len);
+ if (RB_UNLIKELY(state->indent)) {
for (j = 0; j < depth; j++) {
- fbuffer_append(buffer, indent, indent_len);
+ fbuffer_append(buffer, state->indent, state->indent_len);
}
}
}
fbuffer_append_char(buffer, ']');
}
-static int enc_utf8_compatible_p(rb_encoding *enc)
+static int usascii_encindex, utf8_encindex;
+
+static int enc_utf8_compatible_p(int enc_idx)
{
- if (enc == rb_usascii_encoding()) return 1;
- if (enc == rb_utf8_encoding()) return 1;
+ if (enc_idx == usascii_encindex) return 1;
+ if (enc_idx == utf8_encindex) return 1;
return 0;
}
static void generate_json_string(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
{
- fbuffer_append_char(buffer, '"');
- if (!enc_utf8_compatible_p(rb_enc_get(obj))) {
+ if (!enc_utf8_compatible_p(RB_ENCODING_GET(obj))) {
obj = rb_str_export_to_enc(obj, rb_utf8_encoding());
}
- convert_UTF8_to_JSON(buffer, obj, state->ascii_only, state->script_safe);
+
+ fbuffer_append_char(buffer, '"');
+
+ switch(rb_enc_str_coderange(obj)) {
+ case ENC_CODERANGE_7BIT:
+ convert_ASCII_to_JSON(buffer, obj, state->script_safe ? script_safe_escape_table : escape_table);
+ break;
+ case ENC_CODERANGE_VALID:
+ if (RB_UNLIKELY(state->ascii_only)) {
+ convert_UTF8_to_ASCII_only_JSON(buffer, obj, state->script_safe);
+ } else {
+ convert_UTF8_to_JSON(buffer, obj, state->script_safe);
+ }
+ break;
+ default:
+ rb_raise(rb_path2class("JSON::GeneratorError"), "source sequence is illegal/malformed utf-8");
+ break;
+ }
fbuffer_append_char(buffer, '"');
}
@@ -816,9 +788,9 @@ static void generate_json_float(FBuffer *buffer, VALUE Vstate, JSON_Generator_St
VALUE tmp = rb_funcall(obj, i_to_s, 0);
if (!allow_nan) {
if (isinf(value)) {
- rb_raise(eGeneratorError, "%"PRIsVALUE" not allowed in JSON", RB_OBJ_STRING(tmp));
+ rb_raise(eGeneratorError, "%"PRIsVALUE" not allowed in JSON", tmp);
} else if (isnan(value)) {
- rb_raise(eGeneratorError, "%"PRIsVALUE" not allowed in JSON", RB_OBJ_STRING(tmp));
+ rb_raise(eGeneratorError, "%"PRIsVALUE" not allowed in JSON", tmp);
}
}
fbuffer_append_str(buffer, tmp);
@@ -827,35 +799,56 @@ static void generate_json_float(FBuffer *buffer, VALUE Vstate, JSON_Generator_St
static void generate_json(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
{
VALUE tmp;
- VALUE klass = CLASS_OF(obj);
- if (klass == rb_cHash) {
- generate_json_object(buffer, Vstate, state, obj);
- } else if (klass == rb_cArray) {
- generate_json_array(buffer, Vstate, state, obj);
- } else if (klass == rb_cString) {
- generate_json_string(buffer, Vstate, state, obj);
- } else if (obj == Qnil) {
+ if (obj == Qnil) {
generate_json_null(buffer, Vstate, state, obj);
} else if (obj == Qfalse) {
generate_json_false(buffer, Vstate, state, obj);
} else if (obj == Qtrue) {
generate_json_true(buffer, Vstate, state, obj);
- } else if (FIXNUM_P(obj)) {
- generate_json_fixnum(buffer, Vstate, state, obj);
- } else if (RB_TYPE_P(obj, T_BIGNUM)) {
- generate_json_bignum(buffer, Vstate, state, obj);
- } else if (klass == rb_cFloat) {
- generate_json_float(buffer, Vstate, state, obj);
- } else if (state->strict) {
- rb_raise(eGeneratorError, "%"PRIsVALUE" not allowed in JSON", RB_OBJ_STRING(CLASS_OF(obj)));
- } else if (rb_respond_to(obj, i_to_json)) {
- tmp = rb_funcall(obj, i_to_json, 1, Vstate);
- Check_Type(tmp, T_STRING);
- fbuffer_append_str(buffer, tmp);
+ } else if (RB_SPECIAL_CONST_P(obj)) {
+ if (RB_FIXNUM_P(obj)) {
+ generate_json_fixnum(buffer, Vstate, state, obj);
+ } else if (RB_FLONUM_P(obj)) {
+ generate_json_float(buffer, Vstate, state, obj);
+ } else {
+ goto general;
+ }
} else {
- tmp = rb_funcall(obj, i_to_s, 0);
- Check_Type(tmp, T_STRING);
- generate_json_string(buffer, Vstate, state, tmp);
+ VALUE klass = RBASIC_CLASS(obj);
+ switch (RB_BUILTIN_TYPE(obj)) {
+ case T_BIGNUM:
+ generate_json_bignum(buffer, Vstate, state, obj);
+ break;
+ case T_HASH:
+ if (klass != rb_cHash) goto general;
+ generate_json_object(buffer, Vstate, state, obj);
+ break;
+ case T_ARRAY:
+ if (klass != rb_cArray) goto general;
+ generate_json_array(buffer, Vstate, state, obj);
+ break;
+ case T_STRING:
+ if (klass != rb_cString) goto general;
+ generate_json_string(buffer, Vstate, state, obj);
+ break;
+ case T_FLOAT:
+ if (klass != rb_cFloat) goto general;
+ generate_json_float(buffer, Vstate, state, obj);
+ break;
+ default:
+ general:
+ if (state->strict) {
+ rb_raise(eGeneratorError, "%"PRIsVALUE" not allowed in JSON", CLASS_OF(obj));
+ } else if (rb_respond_to(obj, i_to_json)) {
+ tmp = rb_funcall(obj, i_to_json, 1, Vstate);
+ Check_Type(tmp, T_STRING);
+ fbuffer_append_str(buffer, tmp);
+ } else {
+ tmp = rb_funcall(obj, i_to_s, 0);
+ Check_Type(tmp, T_STRING);
+ generate_json_string(buffer, Vstate, state, tmp);
+ }
+ }
}
}
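The `general:` fallback above is what the `strict` option gates. Roughly, from Ruby (a sketch; the exact `#to_s` output of the object will vary):

```ruby
require "json"

JSON.generate(Object.new)               # => "\"#<Object:0x...>\""  (falls back to #to_s)
JSON.generate(Object.new, strict: true) # raises JSON::GeneratorError
```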
@@ -865,28 +858,6 @@ static FBuffer *cState_prepare_buffer(VALUE self)
GET_STATE(self);
buffer = fbuffer_alloc(state->buffer_initial_length);
- if (state->object_delim) {
- fbuffer_clear(state->object_delim);
- } else {
- state->object_delim = fbuffer_alloc(16);
- }
- fbuffer_append_char(state->object_delim, ',');
- if (state->object_delim2) {
- fbuffer_clear(state->object_delim2);
- } else {
- state->object_delim2 = fbuffer_alloc(16);
- }
- if (state->space_before) fbuffer_append(state->object_delim2, state->space_before, state->space_before_len);
- fbuffer_append_char(state->object_delim2, ':');
- if (state->space) fbuffer_append(state->object_delim2, state->space, state->space_len);
-
- if (state->array_delim) {
- fbuffer_clear(state->array_delim);
- } else {
- state->array_delim = fbuffer_alloc(16);
- }
- fbuffer_append_char(state->array_delim, ',');
- if (state->array_nl) fbuffer_append(state->array_delim, state->array_nl, state->array_nl_len);
return buffer;
}
@@ -947,37 +918,6 @@ static VALUE cState_generate(VALUE self, VALUE obj)
return result;
}
-/*
- * call-seq: new(opts = {})
- *
- * Instantiates a new State object, configured by _opts_.
- *
- * _opts_ can have the following keys:
- *
- * * *indent*: a string used to indent levels (default: ''),
- * * *space*: a string that is put after, a : or , delimiter (default: ''),
- * * *space_before*: a string that is put before a : pair delimiter (default: ''),
- * * *object_nl*: a string that is put at the end of a JSON object (default: ''),
- * * *array_nl*: a string that is put at the end of a JSON array (default: ''),
- * * *allow_nan*: true if NaN, Infinity, and -Infinity should be
- * generated, otherwise an exception is thrown, if these values are
- * encountered. This options defaults to false.
- * * *ascii_only*: true if only ASCII characters should be generated. This
- * option defaults to false.
- * * *buffer_initial_length*: sets the initial length of the generator's
- * internal buffer.
- */
-static VALUE cState_initialize(int argc, VALUE *argv, VALUE self)
-{
- VALUE opts;
- GET_STATE(self);
- state->max_nesting = 100;
- state->buffer_initial_length = FBUFFER_INITIAL_LENGTH_DEFAULT;
- rb_scan_args(argc, argv, "01", &opts);
- if (!NIL_P(opts)) cState_configure(self, opts);
- return self;
-}
-
/*
* call-seq: initialize_copy(orig)
*
@@ -999,9 +939,6 @@ static VALUE cState_init_copy(VALUE obj, VALUE orig)
objState->space_before = fstrndup(origState->space_before, origState->space_before_len);
objState->object_nl = fstrndup(origState->object_nl, origState->object_nl_len);
objState->array_nl = fstrndup(origState->array_nl, origState->array_nl_len);
- if (origState->array_delim) objState->array_delim = fbuffer_dup(origState->array_delim);
- if (origState->object_delim) objState->object_delim = fbuffer_dup(origState->object_delim);
- if (origState->object_delim2) objState->object_delim2 = fbuffer_dup(origState->object_delim2);
return obj;
}
@@ -1240,7 +1177,8 @@ static VALUE cState_max_nesting_set(VALUE self, VALUE depth)
{
GET_STATE(self);
Check_Type(depth, T_FIXNUM);
- return state->max_nesting = FIX2LONG(depth);
+ state->max_nesting = FIX2LONG(depth);
+ return Qnil;
}
/*
@@ -1311,6 +1249,18 @@ static VALUE cState_allow_nan_p(VALUE self)
return state->allow_nan ? Qtrue : Qfalse;
}
+/*
+ * call-seq: allow_nan=(enable)
+ *
+ * This sets whether or not to serialize NaN, Infinity, and -Infinity.
+ */
+static VALUE cState_allow_nan_set(VALUE self, VALUE enable)
+{
+ GET_STATE(self);
+ state->allow_nan = RTEST(enable);
+ return Qnil;
+}
+
/*
* call-seq: ascii_only?
*
@@ -1323,6 +1273,18 @@ static VALUE cState_ascii_only_p(VALUE self)
return state->ascii_only ? Qtrue : Qfalse;
}
+/*
+ * call-seq: ascii_only=(enable)
+ *
+ * This sets whether only ASCII characters should be generated.
+ */
+static VALUE cState_ascii_only_set(VALUE self, VALUE enable)
+{
+ GET_STATE(self);
+ state->ascii_only = RTEST(enable);
+ return Qnil;
+}
+
/*
* call-seq: depth
*
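The new writers mirror the existing `allow_nan`/`ascii_only` options, which are normally supplied through the generator. A minimal sketch of the `allow_nan` effect:

```ruby
require "json"

JSON.generate(Float::NAN)                  # raises JSON::GeneratorError ("NaN not allowed in JSON")
JSON.generate(Float::NAN, allow_nan: true) # => "NaN"
```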
@@ -1390,8 +1352,8 @@ void Init_generator(void)
rb_require("json/common");
mJSON = rb_define_module("JSON");
- mExt = rb_define_module_under(mJSON, "Ext");
- mGenerator = rb_define_module_under(mExt, "Generator");
+ VALUE mExt = rb_define_module_under(mJSON, "Ext");
+ VALUE mGenerator = rb_define_module_under(mExt, "Generator");
eGeneratorError = rb_path2class("JSON::GeneratorError");
eNestingError = rb_path2class("JSON::NestingError");
@@ -1401,7 +1363,6 @@ void Init_generator(void)
cState = rb_define_class_under(mGenerator, "State", rb_cObject);
rb_define_alloc_func(cState, cState_s_allocate);
rb_define_singleton_method(cState, "from_state", cState_from_state_s, 1);
- rb_define_method(cState, "initialize", cState_initialize, -1);
rb_define_method(cState, "initialize_copy", cState_init_copy, 1);
rb_define_method(cState, "indent", cState_indent, 0);
rb_define_method(cState, "indent=", cState_indent_set, 1);
@@ -1426,76 +1387,65 @@ void Init_generator(void)
rb_define_method(cState, "strict=", cState_strict_set, 1);
rb_define_method(cState, "check_circular?", cState_check_circular_p, 0);
rb_define_method(cState, "allow_nan?", cState_allow_nan_p, 0);
+ rb_define_method(cState, "allow_nan=", cState_allow_nan_set, 1);
rb_define_method(cState, "ascii_only?", cState_ascii_only_p, 0);
+ rb_define_method(cState, "ascii_only=", cState_ascii_only_set, 1);
rb_define_method(cState, "depth", cState_depth, 0);
rb_define_method(cState, "depth=", cState_depth_set, 1);
rb_define_method(cState, "buffer_initial_length", cState_buffer_initial_length, 0);
rb_define_method(cState, "buffer_initial_length=", cState_buffer_initial_length_set, 1);
- rb_define_method(cState, "configure", cState_configure, 1);
- rb_define_alias(cState, "merge", "configure");
- rb_define_method(cState, "to_h", cState_to_h, 0);
- rb_define_alias(cState, "to_hash", "to_h");
- rb_define_method(cState, "[]", cState_aref, 1);
- rb_define_method(cState, "[]=", cState_aset, 2);
rb_define_method(cState, "generate", cState_generate, 1);
- mGeneratorMethods = rb_define_module_under(mGenerator, "GeneratorMethods");
- mObject = rb_define_module_under(mGeneratorMethods, "Object");
+ VALUE mGeneratorMethods = rb_define_module_under(mGenerator, "GeneratorMethods");
+
+ VALUE mObject = rb_define_module_under(mGeneratorMethods, "Object");
rb_define_method(mObject, "to_json", mObject_to_json, -1);
- mHash = rb_define_module_under(mGeneratorMethods, "Hash");
+
+ VALUE mHash = rb_define_module_under(mGeneratorMethods, "Hash");
rb_define_method(mHash, "to_json", mHash_to_json, -1);
- mArray = rb_define_module_under(mGeneratorMethods, "Array");
+
+ VALUE mArray = rb_define_module_under(mGeneratorMethods, "Array");
rb_define_method(mArray, "to_json", mArray_to_json, -1);
+
#ifdef RUBY_INTEGER_UNIFICATION
- mInteger = rb_define_module_under(mGeneratorMethods, "Integer");
+ VALUE mInteger = rb_define_module_under(mGeneratorMethods, "Integer");
rb_define_method(mInteger, "to_json", mInteger_to_json, -1);
#else
- mFixnum = rb_define_module_under(mGeneratorMethods, "Fixnum");
+ VALUE mFixnum = rb_define_module_under(mGeneratorMethods, "Fixnum");
rb_define_method(mFixnum, "to_json", mFixnum_to_json, -1);
- mBignum = rb_define_module_under(mGeneratorMethods, "Bignum");
+
+ VALUE mBignum = rb_define_module_under(mGeneratorMethods, "Bignum");
rb_define_method(mBignum, "to_json", mBignum_to_json, -1);
#endif
- mFloat = rb_define_module_under(mGeneratorMethods, "Float");
+ VALUE mFloat = rb_define_module_under(mGeneratorMethods, "Float");
rb_define_method(mFloat, "to_json", mFloat_to_json, -1);
- mString = rb_define_module_under(mGeneratorMethods, "String");
+
+ VALUE mString = rb_define_module_under(mGeneratorMethods, "String");
rb_define_singleton_method(mString, "included", mString_included_s, 1);
rb_define_method(mString, "to_json", mString_to_json, -1);
rb_define_method(mString, "to_json_raw", mString_to_json_raw, -1);
rb_define_method(mString, "to_json_raw_object", mString_to_json_raw_object, 0);
+
mString_Extend = rb_define_module_under(mString, "Extend");
rb_define_method(mString_Extend, "json_create", mString_Extend_json_create, 1);
- mTrueClass = rb_define_module_under(mGeneratorMethods, "TrueClass");
+
+ VALUE mTrueClass = rb_define_module_under(mGeneratorMethods, "TrueClass");
rb_define_method(mTrueClass, "to_json", mTrueClass_to_json, -1);
- mFalseClass = rb_define_module_under(mGeneratorMethods, "FalseClass");
+
+ VALUE mFalseClass = rb_define_module_under(mGeneratorMethods, "FalseClass");
rb_define_method(mFalseClass, "to_json", mFalseClass_to_json, -1);
- mNilClass = rb_define_module_under(mGeneratorMethods, "NilClass");
+
+ VALUE mNilClass = rb_define_module_under(mGeneratorMethods, "NilClass");
rb_define_method(mNilClass, "to_json", mNilClass_to_json, -1);
i_to_s = rb_intern("to_s");
i_to_json = rb_intern("to_json");
i_new = rb_intern("new");
- i_indent = rb_intern("indent");
- i_space = rb_intern("space");
- i_space_before = rb_intern("space_before");
- i_object_nl = rb_intern("object_nl");
- i_array_nl = rb_intern("array_nl");
- i_max_nesting = rb_intern("max_nesting");
- i_script_safe = rb_intern("script_safe");
- i_escape_slash = rb_intern("escape_slash");
- i_strict = rb_intern("strict");
- i_allow_nan = rb_intern("allow_nan");
- i_ascii_only = rb_intern("ascii_only");
- i_depth = rb_intern("depth");
- i_buffer_initial_length = rb_intern("buffer_initial_length");
i_pack = rb_intern("pack");
i_unpack = rb_intern("unpack");
i_create_id = rb_intern("create_id");
i_extend = rb_intern("extend");
- i_key_p = rb_intern("key?");
- i_aref = rb_intern("[]");
- i_send = rb_intern("__send__");
- i_respond_to_p = rb_intern("respond_to?");
- i_match = rb_intern("match");
- i_keys = rb_intern("keys");
- i_dup = rb_intern("dup");
+
+ usascii_encindex = rb_usascii_encindex();
+ utf8_encindex = rb_utf8_encindex();
}
diff --git a/ext/json/generator/generator.h b/ext/json/generator/generator.h
index 03709447ff7a4a..0553277fa60669 100644
--- a/ext/json/generator/generator.h
+++ b/ext/json/generator/generator.h
@@ -23,23 +23,6 @@ typedef unsigned char _Bool;
#endif
#endif
-#ifdef HAVE_RUBY_RE_H
-#include "ruby/re.h"
-#else
-#include "re.h"
-#endif
-
-#ifndef rb_intern_str
-#define rb_intern_str(string) SYM2ID(rb_str_intern(string))
-#endif
-
-#ifndef rb_obj_instance_variables
-#define rb_obj_instance_variables(object) rb_funcall(object, rb_intern("instance_variables"), 0)
-#endif
-
-#define option_given_p(opts, key) RTEST(rb_funcall(opts, i_key_p, 1, key))
-
-static void convert_UTF8_to_JSON(FBuffer *out_buffer, VALUE in_string, bool out_ascii_only, bool out_script_safe);
static char *fstrndup(const char *ptr, unsigned long len);
/* ruby api and some helpers */
@@ -55,9 +38,6 @@ typedef struct JSON_Generator_StateStruct {
long object_nl_len;
char *array_nl;
long array_nl_len;
- FBuffer *array_delim;
- FBuffer *object_delim;
- FBuffer *object_delim2;
long max_nesting;
char allow_nan;
char ascii_only;
@@ -106,8 +86,6 @@ static VALUE mNilClass_to_json(int argc, VALUE *argv, VALUE self);
static VALUE mObject_to_json(int argc, VALUE *argv, VALUE self);
static void State_free(void *state);
static VALUE cState_s_allocate(VALUE klass);
-static VALUE cState_configure(VALUE self, VALUE opts);
-static VALUE cState_to_h(VALUE self);
static void generate_json(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj);
static void generate_json_object(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj);
static void generate_json_array(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj);
@@ -123,7 +101,6 @@ static void generate_json_bignum(FBuffer *buffer, VALUE Vstate, JSON_Generator_S
static void generate_json_float(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj);
static VALUE cState_partial_generate(VALUE self, VALUE obj);
static VALUE cState_generate(VALUE self, VALUE obj);
-static VALUE cState_initialize(int argc, VALUE *argv, VALUE self);
static VALUE cState_from_state_s(VALUE self, VALUE opts);
static VALUE cState_indent(VALUE self);
static VALUE cState_indent_set(VALUE self, VALUE indent);
@@ -146,15 +123,6 @@ static VALUE cState_script_safe_set(VALUE self, VALUE depth);
static VALUE cState_strict(VALUE self);
static VALUE cState_strict_set(VALUE self, VALUE strict);
static FBuffer *cState_prepare_buffer(VALUE self);
-#ifndef ZALLOC
-#define ZALLOC(type) ((type *)ruby_zalloc(sizeof(type)))
-static inline void *ruby_zalloc(size_t n)
-{
- void *p = ruby_xmalloc(n);
- memset(p, 0, n);
- return p;
-}
-#endif
static const rb_data_type_t JSON_Generator_State_type;
diff --git a/ext/json/lib/json/common.rb b/ext/json/lib/json/common.rb
index 95098d3bb48f79..403bd34b1dea45 100644
--- a/ext/json/lib/json/common.rb
+++ b/ext/json/lib/json/common.rb
@@ -20,11 +20,16 @@ class << self
# ruby = [0, 1, nil]
# JSON[ruby] # => '[0,1,null]'
def [](object, opts = {})
- if object.respond_to? :to_str
- JSON.parse(object.to_str, opts)
- else
- JSON.generate(object, opts)
+ if object.is_a?(String)
+ return JSON.parse(object, opts)
+ elsif object.respond_to?(:to_str)
+ str = object.to_str
+ if str.is_a?(String)
+ return JSON.parse(str, opts)
+ end
end
+
+ JSON.generate(object, opts)
end
# Returns the JSON parser class that is used by JSON. This is either
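A short sketch of the dispatch this implements: a String (or an object whose `to_str` returns a String) is parsed, anything else is generated:

```ruby
require "json"

JSON["[1,2,3]"]   # String input is parsed    # => [1, 2, 3]
JSON[[1, 2, 3]]   # other input is generated  # => "[1,2,3]"
```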
@@ -576,13 +581,12 @@ class << self
# Sets or returns the default options for the JSON.dump method.
# Initially:
# opts = JSON.dump_default_options
- # opts # => {:max_nesting=>false, :allow_nan=>true, :script_safe=>false}
+ # opts # => {:max_nesting=>false, :allow_nan=>true}
attr_accessor :dump_default_options
end
self.dump_default_options = {
:max_nesting => false,
:allow_nan => true,
- :script_safe => false,
}
# :call-seq:
@@ -613,26 +617,42 @@ class << self
# Output:
# {"foo":[0,1],"bar":{"baz":2,"bat":3},"bam":"bad"}
def dump(obj, anIO = nil, limit = nil, kwargs = nil)
- io_limit_opt = [anIO, limit, kwargs].compact
- kwargs = io_limit_opt.pop if io_limit_opt.last.is_a?(Hash)
- anIO, limit = io_limit_opt
- if anIO.respond_to?(:to_io)
- anIO = anIO.to_io
- elsif limit.nil? && !anIO.respond_to?(:write)
- anIO, limit = nil, anIO
+ if kwargs.nil?
+ if limit.nil?
+ if anIO.is_a?(Hash)
+ kwargs = anIO
+ anIO = nil
+ end
+ elsif limit.is_a?(Hash)
+ kwargs = limit
+ limit = nil
+ end
end
+
+ unless anIO.nil?
+ if anIO.respond_to?(:to_io)
+ anIO = anIO.to_io
+ elsif limit.nil? && !anIO.respond_to?(:write)
+ anIO, limit = nil, anIO
+ end
+ end
+
opts = JSON.dump_default_options
opts = opts.merge(:max_nesting => limit) if limit
opts = merge_dump_options(opts, **kwargs) if kwargs
- result = generate(obj, opts)
- if anIO
+
+ result = begin
+ generate(obj, opts)
+ rescue JSON::NestingError
+ raise ArgumentError, "exceed depth limit"
+ end
+
+ if anIO.nil?
+ result
+ else
anIO.write result
anIO
- else
- result
end
- rescue JSON::NestingError
- raise ArgumentError, "exceed depth limit"
end
# Encodes string using String.encode.
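A minimal sketch of the positional-argument handling preserved here (uses StringIO as the IO-like object; the depth-limit case relies on the documented `ArgumentError` behavior):

```ruby
require "json"
require "stringio"

io = StringIO.new
JSON.dump({ "a" => 1 }, io)  # second positional arg responds to #write: output goes to io
io.string                    # => "{\"a\":1}"

JSON.dump([[[1]]], 2)        # second positional arg used as limit: max_nesting == 2,
                             # so this raises ArgumentError ("exceed depth limit")
```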
@@ -678,11 +698,16 @@ def jj(*objs)
# The _opts_ argument is passed through to generate/parse respectively. See
# generate and parse for their documentation.
def JSON(object, *args)
- if object.respond_to? :to_str
- JSON.parse(object.to_str, args.first)
- else
- JSON.generate(object, args.first)
+ if object.is_a?(String)
+ return JSON.parse(object, args.first)
+ elsif object.respond_to?(:to_str)
+ str = object.to_str
+ if str.is_a?(String)
+ return JSON.parse(object.to_str, args.first)
+ end
end
+
+ JSON.generate(object, args.first)
end
end
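
For reference, a rough sketch of how the reworked dispatch above behaves once the patch is applied (return values assume default options):

    require 'json'

    JSON['[1, 2, 3]']   # String input is always parsed    # => [1, 2, 3]
    JSON[[1, 2, 3]]     # anything else is generated       # => "[1,2,3]"

    # JSON.dump still maps NestingError to ArgumentError, now via the explicit
    # begin/rescue around generate; a numeric second argument is treated as the
    # nesting limit, as before:
    JSON.dump([[[[1]]]], 3)   # => raises ArgumentError, "exceed depth limit"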
diff --git a/ext/json/lib/json/ext.rb b/ext/json/lib/json/ext.rb
index b62e2317123faa..775e28a967a8ef 100644
--- a/ext/json/lib/json/ext.rb
+++ b/ext/json/lib/json/ext.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'json/common'
module JSON
@@ -13,6 +15,9 @@ module Ext
else
require 'json/ext/parser'
require 'json/ext/generator'
+ unless RUBY_ENGINE == 'jruby'
+ require 'json/ext/generator/state'
+ end
$DEBUG and warn "Using Ext extension for JSON."
JSON.parser = Parser
JSON.generator = Generator
diff --git a/ext/json/parser/parser.c b/ext/json/parser/parser.c
index 37627edbd40baf..335b2a20ba5ec9 100644
--- a/ext/json/parser/parser.c
+++ b/ext/json/parser/parser.c
@@ -74,16 +74,20 @@ static VALUE CNaN, CInfinity, CMinusInfinity;
static ID i_json_creatable_p, i_json_create, i_create_id, i_create_additions,
i_chr, i_max_nesting, i_allow_nan, i_symbolize_names,
- i_object_class, i_array_class, i_decimal_class, i_key_p,
+ i_object_class, i_array_class, i_decimal_class,
i_deep_const_get, i_match, i_match_string, i_aset, i_aref,
i_leftshift, i_new, i_try_convert, i_freeze, i_uminus;
+static int binary_encindex;
+static int utf8_encindex;
-#line 105 "parser.rl"
+#line 109 "parser.rl"
-#line 87 "parser.c"
+
+
+#line 91 "parser.c"
enum {JSON_object_start = 1};
enum {JSON_object_first_final = 27};
enum {JSON_object_error = 0};
@@ -91,7 +95,7 @@ enum {JSON_object_error = 0};
enum {JSON_object_en_main = 1};
-#line 147 "parser.rl"
+#line 151 "parser.rl"
static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting)
@@ -107,14 +111,14 @@ static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *resu
*result = NIL_P(object_class) ? rb_hash_new() : rb_class_new_instance(0, 0, object_class);
-#line 111 "parser.c"
+#line 115 "parser.c"
{
cs = JSON_object_start;
}
-#line 162 "parser.rl"
+#line 166 "parser.rl"
-#line 118 "parser.c"
+#line 122 "parser.c"
{
if ( p == pe )
goto _test_eof;
@@ -142,7 +146,7 @@ case 2:
goto st2;
goto st0;
tr2:
-#line 129 "parser.rl"
+#line 133 "parser.rl"
{
char *np;
json->parsing_name = 1;
@@ -155,7 +159,7 @@ case 2:
if ( ++p == pe )
goto _test_eof3;
case 3:
-#line 159 "parser.c"
+#line 163 "parser.c"
switch( (*p) ) {
case 13: goto st3;
case 32: goto st3;
@@ -222,7 +226,7 @@ case 8:
goto st8;
goto st0;
tr11:
-#line 113 "parser.rl"
+#line 117 "parser.rl"
{
VALUE v = Qnil;
char *np = JSON_parse_value(json, p, pe, &v, current_nesting);
@@ -243,7 +247,7 @@ case 8:
if ( ++p == pe )
goto _test_eof9;
case 9:
-#line 247 "parser.c"
+#line 251 "parser.c"
switch( (*p) ) {
case 13: goto st9;
case 32: goto st9;
@@ -332,14 +336,14 @@ case 18:
goto st9;
goto st18;
tr4:
-#line 137 "parser.rl"
+#line 141 "parser.rl"
{ p--; {p++; cs = 27; goto _out;} }
goto st27;
st27:
if ( ++p == pe )
goto _test_eof27;
case 27:
-#line 343 "parser.c"
+#line 347 "parser.c"
goto st0;
st19:
if ( ++p == pe )
@@ -437,7 +441,7 @@ case 26:
_out: {}
}
-#line 163 "parser.rl"
+#line 167 "parser.rl"
if (cs >= JSON_object_first_final) {
if (json->create_additions) {
@@ -462,7 +466,7 @@ case 26:
-#line 466 "parser.c"
+#line 470 "parser.c"
enum {JSON_value_start = 1};
enum {JSON_value_first_final = 29};
enum {JSON_value_error = 0};
@@ -470,7 +474,7 @@ enum {JSON_value_error = 0};
enum {JSON_value_en_main = 1};
-#line 263 "parser.rl"
+#line 267 "parser.rl"
static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting)
@@ -478,14 +482,14 @@ static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *resul
int cs = EVIL;
-#line 482 "parser.c"
+#line 486 "parser.c"
{
cs = JSON_value_start;
}
-#line 270 "parser.rl"
+#line 274 "parser.rl"
-#line 489 "parser.c"
+#line 493 "parser.c"
{
if ( p == pe )
goto _test_eof;
@@ -519,14 +523,14 @@ case 1:
cs = 0;
goto _out;
tr2:
-#line 215 "parser.rl"
+#line 219 "parser.rl"
{
char *np = JSON_parse_string(json, p, pe, result);
if (np == NULL) { p--; {p++; cs = 29; goto _out;} } else {p = (( np))-1;}
}
goto st29;
tr3:
-#line 220 "parser.rl"
+#line 224 "parser.rl"
{
char *np;
if(pe > p + 8 && !strncmp(MinusInfinity, p, 9)) {
@@ -546,7 +550,7 @@ cs = 0;
}
goto st29;
tr7:
-#line 238 "parser.rl"
+#line 242 "parser.rl"
{
char *np;
np = JSON_parse_array(json, p, pe, result, current_nesting + 1);
@@ -554,7 +558,7 @@ cs = 0;
}
goto st29;
tr11:
-#line 244 "parser.rl"
+#line 248 "parser.rl"
{
char *np;
np = JSON_parse_object(json, p, pe, result, current_nesting + 1);
@@ -562,7 +566,7 @@ cs = 0;
}
goto st29;
tr25:
-#line 208 "parser.rl"
+#line 212 "parser.rl"
{
if (json->allow_nan) {
*result = CInfinity;
@@ -572,7 +576,7 @@ cs = 0;
}
goto st29;
tr27:
-#line 201 "parser.rl"
+#line 205 "parser.rl"
{
if (json->allow_nan) {
*result = CNaN;
@@ -582,19 +586,19 @@ cs = 0;
}
goto st29;
tr31:
-#line 195 "parser.rl"
+#line 199 "parser.rl"
{
*result = Qfalse;
}
goto st29;
tr34:
-#line 192 "parser.rl"
+#line 196 "parser.rl"
{
*result = Qnil;
}
goto st29;
tr37:
-#line 198 "parser.rl"
+#line 202 "parser.rl"
{
*result = Qtrue;
}
@@ -603,9 +607,9 @@ cs = 0;
if ( ++p == pe )
goto _test_eof29;
case 29:
-#line 250 "parser.rl"
+#line 254 "parser.rl"
{ p--; {p++; cs = 29; goto _out;} }
-#line 609 "parser.c"
+#line 613 "parser.c"
switch( (*p) ) {
case 13: goto st29;
case 32: goto st29;
@@ -846,7 +850,7 @@ case 28:
_out: {}
}
-#line 271 "parser.rl"
+#line 275 "parser.rl"
if (json->freeze) {
OBJ_FREEZE(*result);
@@ -860,7 +864,7 @@ case 28:
}
-#line 864 "parser.c"
+#line 868 "parser.c"
enum {JSON_integer_start = 1};
enum {JSON_integer_first_final = 3};
enum {JSON_integer_error = 0};
@@ -868,7 +872,7 @@ enum {JSON_integer_error = 0};
enum {JSON_integer_en_main = 1};
-#line 291 "parser.rl"
+#line 295 "parser.rl"
static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result)
@@ -876,15 +880,15 @@ static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *res
int cs = EVIL;
-#line 880 "parser.c"
+#line 884 "parser.c"
{
cs = JSON_integer_start;
}
-#line 298 "parser.rl"
+#line 302 "parser.rl"
json->memo = p;
-#line 888 "parser.c"
+#line 892 "parser.c"
{
if ( p == pe )
goto _test_eof;
@@ -918,14 +922,14 @@ case 3:
goto st0;
goto tr4;
tr4:
-#line 288 "parser.rl"
+#line 292 "parser.rl"
{ p--; {p++; cs = 4; goto _out;} }
goto st4;
st4:
if ( ++p == pe )
goto _test_eof4;
case 4:
-#line 929 "parser.c"
+#line 933 "parser.c"
goto st0;
st5:
if ( ++p == pe )
@@ -944,7 +948,7 @@ case 5:
_out: {}
}
-#line 300 "parser.rl"
+#line 304 "parser.rl"
if (cs >= JSON_integer_first_final) {
long len = p - json->memo;
@@ -959,7 +963,7 @@ case 5:
}
-#line 963 "parser.c"
+#line 967 "parser.c"
enum {JSON_float_start = 1};
enum {JSON_float_first_final = 8};
enum {JSON_float_error = 0};
@@ -967,7 +971,7 @@ enum {JSON_float_error = 0};
enum {JSON_float_en_main = 1};
-#line 325 "parser.rl"
+#line 329 "parser.rl"
static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result)
@@ -975,15 +979,15 @@ static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *resul
int cs = EVIL;
-#line 979 "parser.c"
+#line 983 "parser.c"
{
cs = JSON_float_start;
}
-#line 332 "parser.rl"
+#line 336 "parser.rl"
json->memo = p;
-#line 987 "parser.c"
+#line 991 "parser.c"
{
if ( p == pe )
goto _test_eof;
@@ -1041,14 +1045,14 @@ case 8:
goto st0;
goto tr9;
tr9:
-#line 319 "parser.rl"
+#line 323 "parser.rl"
{ p--; {p++; cs = 9; goto _out;} }
goto st9;
st9:
if ( ++p == pe )
goto _test_eof9;
case 9:
-#line 1052 "parser.c"
+#line 1056 "parser.c"
goto st0;
st5:
if ( ++p == pe )
@@ -1109,7 +1113,7 @@ case 7:
_out: {}
}
-#line 334 "parser.rl"
+#line 338 "parser.rl"
if (cs >= JSON_float_first_final) {
VALUE mod = Qnil;
@@ -1160,7 +1164,7 @@ case 7:
-#line 1164 "parser.c"
+#line 1168 "parser.c"
enum {JSON_array_start = 1};
enum {JSON_array_first_final = 17};
enum {JSON_array_error = 0};
@@ -1168,7 +1172,7 @@ enum {JSON_array_error = 0};
enum {JSON_array_en_main = 1};
-#line 412 "parser.rl"
+#line 416 "parser.rl"
static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting)
@@ -1182,14 +1186,14 @@ static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *resul
*result = NIL_P(array_class) ? rb_ary_new() : rb_class_new_instance(0, 0, array_class);
-#line 1186 "parser.c"
+#line 1190 "parser.c"
{
cs = JSON_array_start;
}
-#line 425 "parser.rl"
+#line 429 "parser.rl"
-#line 1193 "parser.c"
+#line 1197 "parser.c"
{
if ( p == pe )
goto _test_eof;
@@ -1228,7 +1232,7 @@ case 2:
goto st2;
goto st0;
tr2:
-#line 389 "parser.rl"
+#line 393 "parser.rl"
{
VALUE v = Qnil;
char *np = JSON_parse_value(json, p, pe, &v, current_nesting);
@@ -1248,7 +1252,7 @@ case 2:
if ( ++p == pe )
goto _test_eof3;
case 3:
-#line 1252 "parser.c"
+#line 1256 "parser.c"
switch( (*p) ) {
case 13: goto st3;
case 32: goto st3;
@@ -1348,14 +1352,14 @@ case 12:
goto st3;
goto st12;
tr4:
-#line 404 "parser.rl"
+#line 408 "parser.rl"
{ p--; {p++; cs = 17; goto _out;} }
goto st17;
st17:
if ( ++p == pe )
goto _test_eof17;
case 17:
-#line 1359 "parser.c"
+#line 1363 "parser.c"
goto st0;
st13:
if ( ++p == pe )
@@ -1411,7 +1415,7 @@ case 16:
_out: {}
}
-#line 426 "parser.rl"
+#line 430 "parser.rl"
if(cs >= JSON_array_first_final) {
return p + 1;
@@ -1578,7 +1582,7 @@ static VALUE json_string_unescape(char *string, char *stringEnd, int intern, int
}
-#line 1582 "parser.c"
+#line 1586 "parser.c"
enum {JSON_string_start = 1};
enum {JSON_string_first_final = 8};
enum {JSON_string_error = 0};
@@ -1586,7 +1590,7 @@ enum {JSON_string_error = 0};
enum {JSON_string_en_main = 1};
-#line 610 "parser.rl"
+#line 614 "parser.rl"
static int
@@ -1607,15 +1611,15 @@ static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *resu
VALUE match_string;
-#line 1611 "parser.c"
+#line 1615 "parser.c"
{
cs = JSON_string_start;
}
-#line 630 "parser.rl"
+#line 634 "parser.rl"
json->memo = p;
-#line 1619 "parser.c"
+#line 1623 "parser.c"
{
if ( p == pe )
goto _test_eof;
@@ -1640,7 +1644,7 @@ case 2:
goto st0;
goto st2;
tr2:
-#line 597 "parser.rl"
+#line 601 "parser.rl"
{
*result = json_string_unescape(json->memo + 1, p, json->parsing_name || json-> freeze, json->parsing_name && json->symbolize_names);
if (NIL_P(*result)) {
@@ -1650,14 +1654,14 @@ case 2:
{p = (( p + 1))-1;}
}
}
-#line 607 "parser.rl"
+#line 611 "parser.rl"
{ p--; {p++; cs = 8; goto _out;} }
goto st8;
st8:
if ( ++p == pe )
goto _test_eof8;
case 8:
-#line 1661 "parser.c"
+#line 1665 "parser.c"
goto st0;
st3:
if ( ++p == pe )
@@ -1733,7 +1737,7 @@ case 7:
_out: {}
}
-#line 632 "parser.rl"
+#line 636 "parser.rl"
if (json->create_additions && RTEST(match_string = json->match_string)) {
VALUE klass;
@@ -1767,18 +1771,17 @@ case 7:
static VALUE convert_encoding(VALUE source)
{
-#ifdef HAVE_RUBY_ENCODING_H
- rb_encoding *enc = rb_enc_get(source);
- if (enc == rb_ascii8bit_encoding()) {
- if (OBJ_FROZEN(source)) {
- source = rb_str_dup(source);
- }
- FORCE_UTF8(source);
- } else {
- source = rb_str_conv_enc(source, rb_enc_get(source), rb_utf8_encoding());
- }
-#endif
+ int encindex = RB_ENCODING_GET(source);
+
+ if (encindex == utf8_encindex) {
return source;
+ }
+
+ if (encindex == binary_encindex) {
+ return rb_enc_associate_index(rb_str_dup(source), utf8_encindex);
+ }
+
+ return rb_str_conv_enc(source, rb_enc_from_index(encindex), rb_utf8_encoding());
}
/*
@@ -1906,7 +1909,7 @@ static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self)
}
-#line 1910 "parser.c"
+#line 1913 "parser.c"
enum {JSON_start = 1};
enum {JSON_first_final = 10};
enum {JSON_error = 0};
@@ -1914,7 +1917,7 @@ enum {JSON_error = 0};
enum {JSON_en_main = 1};
-#line 818 "parser.rl"
+#line 821 "parser.rl"
/*
@@ -1932,16 +1935,16 @@ static VALUE cParser_parse(VALUE self)
GET_PARSER;
-#line 1936 "parser.c"
+#line 1939 "parser.c"
{
cs = JSON_start;
}
-#line 835 "parser.rl"
+#line 838 "parser.rl"
p = json->source;
pe = p + json->len;
-#line 1945 "parser.c"
+#line 1948 "parser.c"
{
if ( p == pe )
goto _test_eof;
@@ -1975,7 +1978,7 @@ case 1:
cs = 0;
goto _out;
tr2:
-#line 810 "parser.rl"
+#line 813 "parser.rl"
{
char *np = JSON_parse_value(json, p, pe, &result, 0);
if (np == NULL) { p--; {p++; cs = 10; goto _out;} } else {p = (( np))-1;}
@@ -1985,7 +1988,7 @@ cs = 0;
if ( ++p == pe )
goto _test_eof10;
case 10:
-#line 1989 "parser.c"
+#line 1992 "parser.c"
switch( (*p) ) {
case 13: goto st10;
case 32: goto st10;
@@ -2074,7 +2077,7 @@ case 9:
_out: {}
}
-#line 838 "parser.rl"
+#line 841 "parser.rl"
if (cs >= JSON_first_final && p == pe) {
return result;
@@ -2177,7 +2180,6 @@ void Init_parser(void)
i_decimal_class = rb_intern("decimal_class");
i_match = rb_intern("match");
i_match_string = rb_intern("match_string");
- i_key_p = rb_intern("key?");
i_deep_const_get = rb_intern("deep_const_get");
i_aset = rb_intern("[]=");
i_aref = rb_intern("[]");
@@ -2186,6 +2188,9 @@ void Init_parser(void)
i_try_convert = rb_intern("try_convert");
i_freeze = rb_intern("freeze");
i_uminus = rb_intern("-@");
+
+ binary_encindex = rb_ascii8bit_encindex();
+ utf8_encindex = rb_utf8_encindex();
}
/*
diff --git a/ext/json/parser/parser.h b/ext/json/parser/parser.h
index f6974461ae478c..9c7f0e7d21c32b 100644
--- a/ext/json/parser/parser.h
+++ b/ext/json/parser/parser.h
@@ -3,21 +3,11 @@
#include "ruby.h"
-#ifndef HAVE_RUBY_RE_H
-#include "re.h"
-#endif
-
-#ifdef HAVE_RUBY_ST_H
-#include "ruby/st.h"
-#else
-#include "st.h"
-#endif
-
#ifndef MAYBE_UNUSED
# define MAYBE_UNUSED(x) x
#endif
-#define option_given_p(opts, key) RTEST(rb_funcall(opts, i_key_p, 1, key))
+#define option_given_p(opts, key) (rb_hash_lookup2(opts, key, Qundef) != Qundef)
typedef struct JSON_ParserStruct {
VALUE Vsource;
@@ -64,15 +54,6 @@ static void JSON_mark(void *json);
static void JSON_free(void *json);
static VALUE cJSON_parser_s_allocate(VALUE klass);
static VALUE cParser_source(VALUE self);
-#ifndef ZALLOC
-#define ZALLOC(type) ((type *)ruby_zalloc(sizeof(type)))
-static inline void *ruby_zalloc(size_t n)
-{
- void *p = ruby_xmalloc(n);
- memset(p, 0, n);
- return p;
-}
-#endif
static const rb_data_type_t JSON_Parser_type;
diff --git a/ext/json/parser/parser.rl b/ext/json/parser/parser.rl
index 7c41883d88fb31..61674b7812dfe9 100644
--- a/ext/json/parser/parser.rl
+++ b/ext/json/parser/parser.rl
@@ -72,10 +72,14 @@ static VALUE CNaN, CInfinity, CMinusInfinity;
static ID i_json_creatable_p, i_json_create, i_create_id, i_create_additions,
i_chr, i_max_nesting, i_allow_nan, i_symbolize_names,
- i_object_class, i_array_class, i_decimal_class, i_key_p,
+ i_object_class, i_array_class, i_decimal_class,
i_deep_const_get, i_match, i_match_string, i_aset, i_aref,
i_leftshift, i_new, i_try_convert, i_freeze, i_uminus;
+static int binary_encindex;
+static int utf8_encindex;
+
+
%%{
machine JSON_common;
@@ -662,18 +666,17 @@ static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *resu
static VALUE convert_encoding(VALUE source)
{
-#ifdef HAVE_RUBY_ENCODING_H
- rb_encoding *enc = rb_enc_get(source);
- if (enc == rb_ascii8bit_encoding()) {
- if (OBJ_FROZEN(source)) {
- source = rb_str_dup(source);
- }
- FORCE_UTF8(source);
- } else {
- source = rb_str_conv_enc(source, rb_enc_get(source), rb_utf8_encoding());
- }
-#endif
+ int encindex = RB_ENCODING_GET(source);
+
+ if (encindex == utf8_encindex) {
return source;
+ }
+
+ if (encindex == binary_encindex) {
+ return rb_enc_associate_index(rb_str_dup(source), utf8_encindex);
+ }
+
+ return rb_str_conv_enc(source, rb_enc_from_index(encindex), rb_utf8_encoding());
}
/*
@@ -937,7 +940,6 @@ void Init_parser(void)
i_decimal_class = rb_intern("decimal_class");
i_match = rb_intern("match");
i_match_string = rb_intern("match_string");
- i_key_p = rb_intern("key?");
i_deep_const_get = rb_intern("deep_const_get");
i_aset = rb_intern("[]=");
i_aref = rb_intern("[]");
@@ -946,6 +948,9 @@ void Init_parser(void)
i_try_convert = rb_intern("try_convert");
i_freeze = rb_intern("freeze");
i_uminus = rb_intern("-@");
+
+ binary_encindex = rb_ascii8bit_encindex();
+ utf8_encindex = rb_utf8_encindex();
}
/*
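
In Ruby terms, the new convert_encoding() behaves roughly like the sketch below (illustrative only; the C code works on encoding indexes and avoids any copy when the source is already UTF-8):

    def convert_encoding(source)
      case source.encoding
      when Encoding::UTF_8
        source                                      # returned as-is
      when Encoding::BINARY
        source.dup.force_encoding(Encoding::UTF_8)  # re-tag a copy as UTF-8
      else
        source.encode(Encoding::UTF_8)              # transcode everything else
      end
    end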
diff --git a/ext/win32/lib/win32/registry.rb b/ext/win32/lib/win32/registry.rb
index e84653d9459455..d0cbb6afcfce9d 100644
--- a/ext/win32/lib/win32/registry.rb
+++ b/ext/win32/lib/win32/registry.rb
@@ -178,7 +178,7 @@ def initialize(code)
buff = WCHAR_NUL * 1024
lang = 0
begin
- len = FormatMessageW.call(0x1200, 0, code, lang, buff, 1024, 0)
+ len = FormatMessageW.call(0x1200, nil, code, lang, buff, 1024, nil)
msg = buff.byteslice(0, len * WCHAR_SIZE)
msg.delete!(WCHAR_CR)
msg.chomp!
@@ -198,7 +198,7 @@ def initialize(code)
#
class PredefinedKey < Registry
def initialize(hkey, keyname)
- @hkey = hkey
+ @hkey = Fiddle::Pointer.new(hkey)
@parent = nil
@keyname = keyname
@disposition = REG_OPENED_EXISTING_KEY
@@ -238,7 +238,7 @@ module API
"long RegDeleteKeyW(void *, void *)",
"long RegFlushKey(void *)",
"long RegCloseKey(void *)",
- "long RegQueryInfoKey(void *, void *, void *, void *, void *, void *, void *, void *, void *, void *, void *, void *)",
+ "long RegQueryInfoKeyW(void *, void *, void *, void *, void *, void *, void *, void *, void *, void *, void *, void *)",
].each do |fn|
cfunc = extern fn, :stdcall
const_set cfunc.name.intern, cfunc
@@ -285,7 +285,7 @@ def unpackqw(qw)
end
def make_wstr(str)
- str.encode(WCHAR)
+ (str+"\0").encode(WCHAR)
end
def OpenKey(hkey, name, opt, desired)
@@ -298,14 +298,14 @@ def CreateKey(hkey, name, opt, desired)
result = packhandle(0)
disp = packdw(0)
check RegCreateKeyExW.call(hkey, make_wstr(name), 0, 0, opt, desired,
- 0, result, disp)
+ nil, result, disp)
[ unpackhandle(result), unpackdw(disp) ]
end
def EnumValue(hkey, index)
name = WCHAR_NUL * Constants::MAX_KEY_LENGTH
size = packdw(Constants::MAX_KEY_LENGTH)
- check RegEnumValueW.call(hkey, index, name, size, 0, 0, 0, 0)
+ check RegEnumValueW.call(hkey, index, name, size, nil, nil, nil, nil)
name.byteslice(0, unpackdw(size) * WCHAR_SIZE)
end
@@ -313,7 +313,7 @@ def EnumKey(hkey, index)
name = WCHAR_NUL * Constants::MAX_KEY_LENGTH
size = packdw(Constants::MAX_KEY_LENGTH)
wtime = ' ' * 8
- check RegEnumKeyExW.call(hkey, index, name, size, 0, 0, 0, wtime)
+ check RegEnumKeyExW.call(hkey, index, name, size, nil, nil, nil, wtime)
[ name.byteslice(0, unpackdw(size) * WCHAR_SIZE), unpackqw(wtime) ]
end
@@ -321,9 +321,9 @@ def QueryValue(hkey, name)
type = packdw(0)
size = packdw(0)
name = make_wstr(name)
- check RegQueryValueExW.call(hkey, name, 0, type, 0, size)
+ check RegQueryValueExW.call(hkey, name, nil, type, nil, size)
data = "\0".b * unpackdw(size)
- check RegQueryValueExW.call(hkey, name, 0, type, data, size)
+ check RegQueryValueExW.call(hkey, name, nil, type, data, size)
[ unpackdw(type), data[0, unpackdw(size)] ]
end
@@ -360,7 +360,7 @@ def QueryInfoKey(hkey)
maxvaluelen = packdw(0)
secdescs = packdw(0)
wtime = ' ' * 8
- check RegQueryInfoKey.call(hkey, 0, 0, 0, subkeys, maxsubkeylen, 0,
+ check RegQueryInfoKeyW.call(hkey, 0, 0, 0, subkeys, maxsubkeylen, 0,
values, maxvaluenamelen, maxvaluelen, secdescs, wtime)
[ unpackdw(subkeys), unpackdw(maxsubkeylen), unpackdw(values),
unpackdw(maxvaluenamelen), unpackdw(maxvaluelen),
@@ -430,7 +430,7 @@ def self.time2wtime(time)
# If block is given, the key is closed automatically.
def self.open(hkey, subkey, desired = KEY_READ, opt = REG_OPTION_RESERVED)
subkey = subkey.chomp('\\')
- newkey = API.OpenKey(hkey.hkey, subkey, opt, desired)
+ newkey = API.OpenKey(hkey.instance_variable_get(:@hkey), subkey, opt, desired)
obj = new(newkey, hkey, subkey, REG_OPENED_EXISTING_KEY)
if block_given?
begin
@@ -457,7 +457,7 @@ def self.open(hkey, subkey, desired = KEY_READ, opt = REG_OPTION_RESERVED)
# If block is given, the key is closed automatically.
#
def self.create(hkey, subkey, desired = KEY_ALL_ACCESS, opt = REG_OPTION_RESERVED)
- newkey, disp = API.CreateKey(hkey.hkey, subkey, opt, desired)
+ newkey, disp = API.CreateKey(hkey.instance_variable_get(:@hkey), subkey, opt, desired)
obj = new(newkey, hkey, subkey, disp)
if block_given?
begin
@@ -479,7 +479,7 @@ def self.create(hkey, subkey, desired = KEY_ALL_ACCESS, opt = REG_OPTION_RESERVE
# initialize
#
def initialize(hkey, parent, keyname, disposition)
- @hkey = hkey
+ @hkey = Fiddle::Pointer.new(hkey)
@parent = parent
@keyname = keyname
@disposition = disposition
@@ -487,8 +487,6 @@ def initialize(hkey, parent, keyname, disposition)
ObjectSpace.define_finalizer self, @@final.call(@hkeyfinal)
end
- # Returns key handle value.
- attr_reader :hkey
# Win32::Registry object of parent key, or nil if predefeined key.
attr_reader :parent
# Same as subkey value of Registry.open or
@@ -497,6 +495,11 @@ def initialize(hkey, parent, keyname, disposition)
# Disposition value (REG_CREATED_NEW_KEY or REG_OPENED_EXISTING_KEY).
attr_reader :disposition
+ # Returns key handle value.
+ def hkey
+ @hkey.to_i
+ end
+
#
# Returns if key is created ((*newly*)).
# (see Registry.create) -- basically you call create
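
With @hkey now wrapped in a Fiddle::Pointer, existing callers are unaffected: the public #hkey reader still returns an Integer handle. A Windows-only usage sketch (the key path is just an example):

    require 'win32/registry'

    Win32::Registry::HKEY_LOCAL_MACHINE.open('SOFTWARE\Microsoft\Windows NT\CurrentVersion') do |reg|
      reg.hkey           # => Integer handle value (internally a Fiddle::Pointer)
      reg['ProductName'] # reading values works as before
    end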
diff --git a/ext/win32/win32-registry.gemspec b/ext/win32/win32-registry.gemspec
index b6df247574a3f5..ac5ff103d35c01 100644
--- a/ext/win32/win32-registry.gemspec
+++ b/ext/win32/win32-registry.gemspec
@@ -15,10 +15,13 @@ Gem::Specification.new do |spec|
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
+ excludes = %w[
+ bin/ test/ spec/ features/ rakelib/
+ .git .github .mailmap appveyor Rakefile Gemfile
+ ]
spec.files = Dir.chdir(__dir__) do
`git ls-files -z`.split("\x0").reject do |f|
- (File.expand_path(f) == __FILE__) ||
- f.start_with?(*%w[bin/ test/ spec/ features/ .git .github appveyor Gemfile])
+ File.identical?(f, __FILE__) || f.start_with?(*excludes)
end
end
spec.bindir = "exe"
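
The move to File.identical? is a small robustness tweak over comparing expanded path strings: it also matches when the gemspec is reached through a different spelling of the same path, for example case differences on a case-insensitive filesystem. A tiny illustration with hypothetical paths:

    # true when both names resolve to the same file on disk, regardless of spelling
    File.identical?("win32-registry.gemspec", "./WIN32-REGISTRY.GEMSPEC")
    # => true on a case-insensitive filesystem, false elsewhere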
diff --git a/gc.c b/gc.c
index dc0c712750f17d..9c2fec7e4f15cb 100644
--- a/gc.c
+++ b/gc.c
@@ -3139,6 +3139,20 @@ rb_gc_location(VALUE value)
return rb_gc_impl_location(rb_gc_get_objspace(), value);
}
+void
+rb_gc_prepare_heap_process_object(VALUE obj)
+{
+ switch (BUILTIN_TYPE(obj)) {
+ case T_STRING:
+ // Precompute the string coderange. This both saves time for when it will
+ // eventually be needed, and avoids mutating heap pages after a potential fork.
+ rb_enc_str_coderange(obj);
+ break;
+ default:
+ break;
+ }
+}
+
void
rb_gc_prepare_heap(void)
{
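
This hook is reached through the existing heap-preparation path; at the Ruby level that path is normally triggered by Process.warmup (an assumption about the call site, which is not shown in this diff), so the effect can be sketched as:

    # Precompute coderanges for existing strings before forking workers, so the
    # lazily computed flags don't dirty copy-on-write pages in the children.
    Process.warmup
    pid = fork { do_work }   # do_work is a placeholder
    Process.wait(pid)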
diff --git a/gc/default.c b/gc/default.c
index 2a09c6c31cbf5e..7def18c015a455 100644
--- a/gc/default.c
+++ b/gc/default.c
@@ -6773,12 +6773,8 @@ gc_set_candidate_object_i(void *vstart, void *vend, size_t stride, void *data)
case T_NONE:
case T_ZOMBIE:
break;
- case T_STRING:
- // precompute the string coderange. This both save time for when it will be
- // eventually needed, and avoid mutating heap pages after a potential fork.
- rb_enc_str_coderange(v);
- // fall through
default:
+ rb_gc_prepare_heap_process_object(v);
if (!RVALUE_OLD_P(objspace, v) && !RVALUE_WB_UNPROTECTED(objspace, v)) {
RVALUE_AGE_SET_CANDIDATE(objspace, v);
}
diff --git a/gc/gc.h b/gc/gc.h
index f37ba749b21b24..c6e8c0f25e0657 100644
--- a/gc/gc.h
+++ b/gc/gc.h
@@ -71,6 +71,7 @@ uint32_t rb_gc_rebuild_shape(VALUE obj, size_t heap_id);
size_t rb_obj_memsize_of(VALUE obj);
short rb_gc_vm_weak_table_count(void);
void rb_gc_vm_weak_table_foreach(vm_table_foreach_callback_func callback, vm_table_update_callback_func update_callback, void *data, enum rb_gc_vm_weak_tables table);
+void rb_gc_prepare_heap_process_object(VALUE obj);
RUBY_SYMBOL_EXPORT_END
diff --git a/iseq.c b/iseq.c
index a60228c8e585eb..8c8e54b9895502 100644
--- a/iseq.c
+++ b/iseq.c
@@ -3029,7 +3029,10 @@ iseqw_s_of(VALUE klass, VALUE body)
{
const rb_iseq_t *iseq = NULL;
- if (rb_obj_is_proc(body)) {
+ if (rb_frame_info_p(body)) {
+ iseq = rb_get_iseq_from_frame_info(body);
+ }
+ else if (rb_obj_is_proc(body)) {
iseq = vm_proc_iseq(body);
if (!rb_obj_is_iseq((VALUE)iseq)) {
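
The new rb_frame_info_p branch teaches RubyVM::InstructionSequence.of to accept a frame-info object in addition to procs and methods. A hedged sketch of the intended use, assuming the frame info is a Thread::Backtrace::Location:

    def where_am_i
      caller_locations(1, 1).first   # Thread::Backtrace::Location of the caller
    end

    loc  = where_am_i
    iseq = RubyVM::InstructionSequence.of(loc)   # previously unsupported for locations
    puts iseq.disasm if iseq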
diff --git a/lib/bundler/cli/check.rb b/lib/bundler/cli/check.rb
index 2adf59d5d57e22..493eb3ec6a5ad6 100644
--- a/lib/bundler/cli/check.rb
+++ b/lib/bundler/cli/check.rb
@@ -15,7 +15,7 @@ def run
definition.validate_runtime!
begin
- definition.resolve_only_locally!
+ definition.check!
not_installed = definition.missing_specs
rescue GemNotFound, GitError, SolveFailure
Bundler.ui.error "Bundler can't satisfy your Gemfile's dependencies."
@@ -32,7 +32,7 @@ def run
Bundler.ui.error "This bundle has been frozen, but there is no #{SharedHelpers.relative_lockfile_path} present"
exit 1
else
- Bundler.load.lock(preserve_unknown_sections: true) unless options[:"dry-run"]
+ definition.lock(true) unless options[:"dry-run"]
Bundler.ui.info "The Gemfile's dependencies are satisfied"
end
end
diff --git a/lib/bundler/definition.rb b/lib/bundler/definition.rb
index bb6eb11e4eaf7f..66ca408c9a9ec4 100644
--- a/lib/bundler/definition.rb
+++ b/lib/bundler/definition.rb
@@ -19,7 +19,8 @@ class << self
:ruby_version,
:lockfile,
:gemfiles,
- :locked_checksums
+ :locked_checksums,
+ :sources
)
# Given a gemfile and lockfile creates a Bundler definition
@@ -162,7 +163,14 @@ def gem_version_promoter
@gem_version_promoter ||= GemVersionPromoter.new
end
- def resolve_only_locally!
+ def check!
+ # If dependencies have changed, we need to resolve remotely. Otherwise,
+ # since we'll be resolving with a single local source, we may end up
+ # locking gems under the wrong source in the lockfile, and missing lockfile
+ # checksums
+ resolve_remotely! if @dependency_changes
+
+ # Now do a local only resolve, to verify if any gems are missing locally
sources.local_only!
resolve
end
@@ -500,8 +508,6 @@ def unlocking?
private
- attr_reader :sources
-
def should_add_extra_platforms?
!lockfile_exists? && generic_local_platform_is_ruby? && !Bundler.settings[:force_ruby_platform]
end
@@ -888,8 +894,6 @@ def converge_specs(specs)
converged = []
deps = []
- @specs_that_changed_sources = []
-
specs.each do |s|
name = s.name
dep = @dependencies.find {|d| s.satisfies?(d) }
@@ -898,7 +902,6 @@ def converge_specs(specs)
if dep
gemfile_source = dep.source || default_source
- @specs_that_changed_sources << s if gemfile_source != lockfile_source
deps << dep if !dep.source || lockfile_source.include?(dep.source)
@gems_to_unlock << name if lockfile_source.include?(dep.source) && lockfile_source != gemfile_source
@@ -980,7 +983,6 @@ def find_source_requirements
source_requirements["bundler"] = sources.metadata_source # needs to come last to override
end
- verify_changed_sources!
source_requirements
end
@@ -988,14 +990,6 @@ def default_source
sources.default_source
end
- def verify_changed_sources!
- @specs_that_changed_sources.each do |s|
- if s.source.specs.search(s.name).empty?
- raise GemNotFound, "Could not find gem '#{s.name}' in #{s.source}"
- end
- end
- end
-
def requested_groups
values = groups - Bundler.settings[:without] - @optional_groups + Bundler.settings[:with]
values &= Bundler.settings[:only] unless Bundler.settings[:only].empty?
diff --git a/lib/bundler/dsl.rb b/lib/bundler/dsl.rb
index 24a9a7683a3d4a..df904f074adea5 100644
--- a/lib/bundler/dsl.rb
+++ b/lib/bundler/dsl.rb
@@ -110,9 +110,23 @@ def gem(name, *args)
if gemspec_dep
gemfile_dep = [dep, current].find(&:runtime?)
- unless current_requirement_open
+ if gemfile_dep && !current_requirement_open
Bundler.ui.warn "A gemspec development dependency (#{gemspec_dep.name}, #{gemspec_dep.requirement}) is being overridden by a Gemfile dependency (#{gemfile_dep.name}, #{gemfile_dep.requirement}).\n" \
"This behaviour may change in the future. Please remove either of them, or make sure they both have the same requirement\n"
+ elsif gemfile_dep.nil?
+ require_relative "vendor/pub_grub/lib/pub_grub/version_range"
+ require_relative "vendor/pub_grub/lib/pub_grub/version_constraint"
+ require_relative "vendor/pub_grub/lib/pub_grub/version_union"
+ require_relative "vendor/pub_grub/lib/pub_grub/rubygems"
+
+ current_gemspec_range = PubGrub::RubyGems.requirement_to_range(current.requirement)
+ next_gemspec_range = PubGrub::RubyGems.requirement_to_range(dep.requirement)
+
+ if current_gemspec_range.intersects?(next_gemspec_range)
+ dep = Dependency.new(name, current.requirement.as_list + dep.requirement.as_list, options)
+ else
+ raise GemfileError, "Two gemspecs have conflicting requirements on the same gem: #{dep} and #{current}"
+ end
end
else
update_prompt = ""
@@ -133,20 +147,22 @@ def gem(name, *args)
end
end
- # Always prefer the dependency from the Gemfile
- if current.gemspec_dev_dep?
- @dependencies.delete(current)
- elsif dep.gemspec_dev_dep?
- return
- elsif current.source != dep.source
- raise GemfileError, "You cannot specify the same gem twice coming from different sources.\n" \
- "You specified that #{dep.name} (#{dep.requirement}) should come from " \
- "#{current.source || "an unspecified source"} and #{dep.source}\n"
- else
- Bundler.ui.warn "Your Gemfile lists the gem #{current.name} (#{current.requirement}) more than once.\n" \
- "You should probably keep only one of them.\n" \
- "Remove any duplicate entries and specify the gem only once.\n" \
- "While it's not a problem now, it could cause errors if you change the version of one of them later."
+ unless current.gemspec_dev_dep? && dep.gemspec_dev_dep?
+ # Always prefer the dependency from the Gemfile
+ if current.gemspec_dev_dep?
+ @dependencies.delete(current)
+ elsif dep.gemspec_dev_dep?
+ return
+ elsif current.source != dep.source
+ raise GemfileError, "You cannot specify the same gem twice coming from different sources.\n" \
+ "You specified that #{dep.name} (#{dep.requirement}) should come from " \
+ "#{current.source || "an unspecified source"} and #{dep.source}\n"
+ else
+ Bundler.ui.warn "Your Gemfile lists the gem #{current.name} (#{current.requirement}) more than once.\n" \
+ "You should probably keep only one of them.\n" \
+ "Remove any duplicate entries and specify the gem only once.\n" \
+ "While it's not a problem now, it could cause errors if you change the version of one of them later."
+ end
end
end
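
The merge-or-raise decision above leans on PubGrub's range arithmetic, which the diff requires from Bundler's vendored copy. A small sketch of the same intersection check, run in an environment where Bundler (and its vendored pub_grub, namespaced under Bundler::PubGrub) is already loaded:

    a = Gem::Requirement.new(">= 1.0", "< 3.0")
    b = Gem::Requirement.new("~> 2.1")

    range_a = Bundler::PubGrub::RubyGems.requirement_to_range(a)
    range_b = Bundler::PubGrub::RubyGems.requirement_to_range(b)

    if range_a.intersects?(range_b)
      merged = Gem::Requirement.new(a.as_list + b.as_list)   # kept as one dependency
    else
      raise "conflicting requirements"                       # mirrors the GemfileError path
    end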
diff --git a/lib/bundler/lazy_specification.rb b/lib/bundler/lazy_specification.rb
index 8669e021c218e3..74f7da188f7a78 100644
--- a/lib/bundler/lazy_specification.rb
+++ b/lib/bundler/lazy_specification.rb
@@ -27,11 +27,18 @@ def initialize(name, version, platform, source = nil)
@dependencies = []
@required_ruby_version = Gem::Requirement.default
@required_rubygems_version = Gem::Requirement.default
- @platform = platform || Gem::Platform::RUBY
- @source = source
+ @platform = platform || Gem::Platform::RUBY
+
+ @original_source = source
+ @source = source
+
@force_ruby_platform = default_force_ruby_platform
end
+ def source_changed?
+ @original_source != source
+ end
+
def full_name
@full_name ||= if platform == Gem::Platform::RUBY
"#{@name}-#{@version}"
diff --git a/lib/bundler/lockfile_generator.rb b/lib/bundler/lockfile_generator.rb
index a646d00ee1c409..904552fa0c8eb0 100644
--- a/lib/bundler/lockfile_generator.rb
+++ b/lib/bundler/lockfile_generator.rb
@@ -29,7 +29,7 @@ def generate!
private
def add_sources
- definition.send(:sources).lock_sources.each_with_index do |source, idx|
+ definition.sources.lock_sources.each_with_index do |source, idx|
out << "\n" unless idx.zero?
# Add the source header
diff --git a/lib/bundler/man/bundle-add.1 b/lib/bundler/man/bundle-add.1
index dae05bd9458701..b0c6d2cc90138b 100644
--- a/lib/bundler/man/bundle-add.1
+++ b/lib/bundler/man/bundle-add.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-ADD" "1" "September 2024" ""
+.TH "BUNDLE\-ADD" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-add\fR \- Add gem to the Gemfile and run bundle install
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-binstubs.1 b/lib/bundler/man/bundle-binstubs.1
index 56c9966e75c1ca..9b4b10d5916692 100644
--- a/lib/bundler/man/bundle-binstubs.1
+++ b/lib/bundler/man/bundle-binstubs.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-BINSTUBS" "1" "September 2024" ""
+.TH "BUNDLE\-BINSTUBS" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-binstubs\fR \- Install the binstubs of the listed gems
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-cache.1 b/lib/bundler/man/bundle-cache.1
index d634eef203ee37..ad26879bb95c8c 100644
--- a/lib/bundler/man/bundle-cache.1
+++ b/lib/bundler/man/bundle-cache.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-CACHE" "1" "September 2024" ""
+.TH "BUNDLE\-CACHE" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-cache\fR \- Package your needed \fB\.gem\fR files into your application
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-check.1 b/lib/bundler/man/bundle-check.1
index e15a41e4fddba4..be7b0abfcd05d8 100644
--- a/lib/bundler/man/bundle-check.1
+++ b/lib/bundler/man/bundle-check.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-CHECK" "1" "September 2024" ""
+.TH "BUNDLE\-CHECK" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-check\fR \- Verifies if dependencies are satisfied by installed gems
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-clean.1 b/lib/bundler/man/bundle-clean.1
index aa5ccf7594fffc..43029c780e5a82 100644
--- a/lib/bundler/man/bundle-clean.1
+++ b/lib/bundler/man/bundle-clean.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-CLEAN" "1" "September 2024" ""
+.TH "BUNDLE\-CLEAN" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-clean\fR \- Cleans up unused gems in your bundler directory
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-config.1 b/lib/bundler/man/bundle-config.1
index 47104fb5c65b4d..3b46955e0eabf7 100644
--- a/lib/bundler/man/bundle-config.1
+++ b/lib/bundler/man/bundle-config.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-CONFIG" "1" "September 2024" ""
+.TH "BUNDLE\-CONFIG" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-config\fR \- Set bundler configuration options
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-console.1 b/lib/bundler/man/bundle-console.1
index f2b2ddaed055bc..720318ec841b3a 100644
--- a/lib/bundler/man/bundle-console.1
+++ b/lib/bundler/man/bundle-console.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-CONSOLE" "1" "September 2024" ""
+.TH "BUNDLE\-CONSOLE" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-console\fR \- Deprecated way to open an IRB session with the bundle pre\-loaded
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-doctor.1 b/lib/bundler/man/bundle-doctor.1
index f225d0cd791fbe..21c1777241e936 100644
--- a/lib/bundler/man/bundle-doctor.1
+++ b/lib/bundler/man/bundle-doctor.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-DOCTOR" "1" "September 2024" ""
+.TH "BUNDLE\-DOCTOR" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-doctor\fR \- Checks the bundle for common problems
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-exec.1 b/lib/bundler/man/bundle-exec.1
index e16b7bc7474b56..a4e74e6b60e96b 100644
--- a/lib/bundler/man/bundle-exec.1
+++ b/lib/bundler/man/bundle-exec.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-EXEC" "1" "September 2024" ""
+.TH "BUNDLE\-EXEC" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-exec\fR \- Execute a command in the context of the bundle
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-gem.1 b/lib/bundler/man/bundle-gem.1
index e6e58cd409382f..54be9f4891dd5b 100644
--- a/lib/bundler/man/bundle-gem.1
+++ b/lib/bundler/man/bundle-gem.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-GEM" "1" "September 2024" ""
+.TH "BUNDLE\-GEM" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-gem\fR \- Generate a project skeleton for creating a rubygem
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-help.1 b/lib/bundler/man/bundle-help.1
index d7a05f824e972f..7058ed8172cccf 100644
--- a/lib/bundler/man/bundle-help.1
+++ b/lib/bundler/man/bundle-help.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-HELP" "1" "September 2024" ""
+.TH "BUNDLE\-HELP" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-help\fR \- Displays detailed help for each subcommand
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-info.1 b/lib/bundler/man/bundle-info.1
index 6b401a57f42bca..7a47949c029cf1 100644
--- a/lib/bundler/man/bundle-info.1
+++ b/lib/bundler/man/bundle-info.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-INFO" "1" "September 2024" ""
+.TH "BUNDLE\-INFO" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-info\fR \- Show information for the given gem in your bundle
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-init.1 b/lib/bundler/man/bundle-init.1
index f2e444c7c283f8..96b2a7d78f9209 100644
--- a/lib/bundler/man/bundle-init.1
+++ b/lib/bundler/man/bundle-init.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-INIT" "1" "September 2024" ""
+.TH "BUNDLE\-INIT" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-init\fR \- Generates a Gemfile into the current working directory
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-inject.1 b/lib/bundler/man/bundle-inject.1
index 8eb3633837cf0c..20e4a824c4b91b 100644
--- a/lib/bundler/man/bundle-inject.1
+++ b/lib/bundler/man/bundle-inject.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-INJECT" "1" "September 2024" ""
+.TH "BUNDLE\-INJECT" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-inject\fR \- Add named gem(s) with version requirements to Gemfile
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-install.1 b/lib/bundler/man/bundle-install.1
index 7539d18f81fd9b..ffdbec98c22c91 100644
--- a/lib/bundler/man/bundle-install.1
+++ b/lib/bundler/man/bundle-install.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-INSTALL" "1" "September 2024" ""
+.TH "BUNDLE\-INSTALL" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-install\fR \- Install the dependencies specified in your Gemfile
.SH "SYNOPSIS"
@@ -66,7 +66,7 @@ The location to install the specified gems to\. This defaults to Rubygems' setti
This option is deprecated in favor of the \fBpath\fR setting\.
.TP
\fB\-\-quiet\fR
-Do not print progress information to the standard output\. Instead, Bundler will exit using a status code (\fB$?\fR)\.
+Do not print progress information to the standard output\.
.TP
\fB\-\-retry=[]\fR
Retry failed network or git requests for \fInumber\fR times\.
diff --git a/lib/bundler/man/bundle-install.1.ronn b/lib/bundler/man/bundle-install.1.ronn
index ed8169de052a5a..1187455c92b7b3 100644
--- a/lib/bundler/man/bundle-install.1.ronn
+++ b/lib/bundler/man/bundle-install.1.ronn
@@ -136,8 +136,7 @@ automatically and that requires `bundler` to silently remember them. Since
This option is deprecated in favor of the `path` setting.
* `--quiet`:
- Do not print progress information to the standard output. Instead, Bundler
- will exit using a status code (`$?`).
+ Do not print progress information to the standard output.
* `--retry=[]`:
Retry failed network or git requests for times.
diff --git a/lib/bundler/man/bundle-list.1 b/lib/bundler/man/bundle-list.1
index 5cbb1c3cfe6215..41d9de5e8c5df1 100644
--- a/lib/bundler/man/bundle-list.1
+++ b/lib/bundler/man/bundle-list.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-LIST" "1" "September 2024" ""
+.TH "BUNDLE\-LIST" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-list\fR \- List all the gems in the bundle
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-lock.1 b/lib/bundler/man/bundle-lock.1
index 5f0d43a9aa9041..2fa3dbaebbf1c8 100644
--- a/lib/bundler/man/bundle-lock.1
+++ b/lib/bundler/man/bundle-lock.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-LOCK" "1" "September 2024" ""
+.TH "BUNDLE\-LOCK" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-lock\fR \- Creates / Updates a lockfile without installing
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-open.1 b/lib/bundler/man/bundle-open.1
index fb5ff1fee71cb3..befe20caa59e17 100644
--- a/lib/bundler/man/bundle-open.1
+++ b/lib/bundler/man/bundle-open.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-OPEN" "1" "September 2024" ""
+.TH "BUNDLE\-OPEN" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-open\fR \- Opens the source directory for a gem in your bundle
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-outdated.1 b/lib/bundler/man/bundle-outdated.1
index ea3005dd87d829..4028aa943afbd2 100644
--- a/lib/bundler/man/bundle-outdated.1
+++ b/lib/bundler/man/bundle-outdated.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-OUTDATED" "1" "September 2024" ""
+.TH "BUNDLE\-OUTDATED" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-outdated\fR \- List installed gems with newer versions available
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-platform.1 b/lib/bundler/man/bundle-platform.1
index c3058175fc8d75..9436800771b267 100644
--- a/lib/bundler/man/bundle-platform.1
+++ b/lib/bundler/man/bundle-platform.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-PLATFORM" "1" "September 2024" ""
+.TH "BUNDLE\-PLATFORM" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-platform\fR \- Displays platform compatibility information
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-plugin.1 b/lib/bundler/man/bundle-plugin.1
index 34437d99739e19..ffaac44558524f 100644
--- a/lib/bundler/man/bundle-plugin.1
+++ b/lib/bundler/man/bundle-plugin.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-PLUGIN" "1" "September 2024" ""
+.TH "BUNDLE\-PLUGIN" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-plugin\fR \- Manage Bundler plugins
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-pristine.1 b/lib/bundler/man/bundle-pristine.1
index 103c6f68ae040b..91deb5c72b59d8 100644
--- a/lib/bundler/man/bundle-pristine.1
+++ b/lib/bundler/man/bundle-pristine.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-PRISTINE" "1" "September 2024" ""
+.TH "BUNDLE\-PRISTINE" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-pristine\fR \- Restores installed gems to their pristine condition
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-remove.1 b/lib/bundler/man/bundle-remove.1
index 4a2ed4eb132007..eaf3e624c9087f 100644
--- a/lib/bundler/man/bundle-remove.1
+++ b/lib/bundler/man/bundle-remove.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-REMOVE" "1" "September 2024" ""
+.TH "BUNDLE\-REMOVE" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-remove\fR \- Removes gems from the Gemfile
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-show.1 b/lib/bundler/man/bundle-show.1
index dfbb43921855b3..3e48ca67f1febc 100644
--- a/lib/bundler/man/bundle-show.1
+++ b/lib/bundler/man/bundle-show.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-SHOW" "1" "September 2024" ""
+.TH "BUNDLE\-SHOW" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-show\fR \- Shows all the gems in your bundle, or the path to a gem
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-update.1 b/lib/bundler/man/bundle-update.1
index 5eb9514f03e82c..55cd40bfe4be25 100644
--- a/lib/bundler/man/bundle-update.1
+++ b/lib/bundler/man/bundle-update.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-UPDATE" "1" "September 2024" ""
+.TH "BUNDLE\-UPDATE" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-update\fR \- Update your gems to the latest available versions
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-version.1 b/lib/bundler/man/bundle-version.1
index a29858181a8ba9..003931cb4784ef 100644
--- a/lib/bundler/man/bundle-version.1
+++ b/lib/bundler/man/bundle-version.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-VERSION" "1" "September 2024" ""
+.TH "BUNDLE\-VERSION" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-version\fR \- Prints Bundler version information
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle-viz.1 b/lib/bundler/man/bundle-viz.1
index 9609e098dd85d5..2848ba18777bb4 100644
--- a/lib/bundler/man/bundle-viz.1
+++ b/lib/bundler/man/bundle-viz.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE\-VIZ" "1" "September 2024" ""
+.TH "BUNDLE\-VIZ" "1" "October 2024" ""
.SH "NAME"
\fBbundle\-viz\fR \- Generates a visual dependency graph for your Gemfile
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/bundle.1 b/lib/bundler/man/bundle.1
index d84d788748a348..688c29961a7e43 100644
--- a/lib/bundler/man/bundle.1
+++ b/lib/bundler/man/bundle.1
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "BUNDLE" "1" "September 2024" ""
+.TH "BUNDLE" "1" "October 2024" ""
.SH "NAME"
\fBbundle\fR \- Ruby Dependency Management
.SH "SYNOPSIS"
diff --git a/lib/bundler/man/gemfile.5 b/lib/bundler/man/gemfile.5
index f24a1c540d904e..dcf4b34c5f7aa3 100644
--- a/lib/bundler/man/gemfile.5
+++ b/lib/bundler/man/gemfile.5
@@ -1,6 +1,6 @@
.\" generated with nRonn/v0.11.1
.\" https://github.com/n-ronn/nronn/tree/0.11.1
-.TH "GEMFILE" "5" "September 2024" ""
+.TH "GEMFILE" "5" "October 2024" ""
.SH "NAME"
\fBGemfile\fR \- A format for describing gem dependencies for Ruby programs
.SH "SYNOPSIS"
diff --git a/lib/bundler/resolver/base.rb b/lib/bundler/resolver/base.rb
index 3f2436672aa780..4c2aed32de24c6 100644
--- a/lib/bundler/resolver/base.rb
+++ b/lib/bundler/resolver/base.rb
@@ -107,6 +107,10 @@ def unrestrict_names(names)
def build_base_requirements
base_requirements = {}
@base.each do |ls|
+ if ls.source_changed? && ls.source.specs.search(ls.name).empty?
+ raise GemNotFound, "Could not find gem '#{ls.name}' in #{ls.source}"
+ end
+
req = Gem::Requirement.new(ls.version)
base_requirements[ls.name] = req
end
diff --git a/lib/json/ext/generator/state.rb b/lib/json/ext/generator/state.rb
new file mode 100644
index 00000000000000..4f9675d7b5ba16
--- /dev/null
+++ b/lib/json/ext/generator/state.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+
+module JSON
+ module Ext
+ module Generator
+ class State
+ # call-seq: new(opts = {})
+ #
+ # Instantiates a new State object, configured by _opts_.
+ #
+ # _opts_ can have the following keys:
+ #
+ # * *indent*: a string used to indent levels (default: ''),
+ # * *space*: a string that is put after a : or , delimiter (default: ''),
+ # * *space_before*: a string that is put before a : pair delimiter (default: ''),
+ # * *object_nl*: a string that is put at the end of a JSON object (default: ''),
+ # * *array_nl*: a string that is put at the end of a JSON array (default: ''),
+ # * *allow_nan*: true if NaN, Infinity, and -Infinity should be
+ # generated, otherwise an exception is thrown if these values are
+ # encountered. This option defaults to false.
+ # * *ascii_only*: true if only ASCII characters should be generated. This
+ # option defaults to false.
+ # * *buffer_initial_length*: sets the initial length of the generator's
+ # internal buffer.
+ def initialize(opts = nil)
+ if opts && !opts.empty?
+ configure(opts)
+ end
+ end
+
+ # call-seq: configure(opts)
+ #
+ # Configure this State instance with the Hash _opts_, and return
+ # itself.
+ def configure(opts)
+ unless opts.is_a?(Hash)
+ if opts.respond_to?(:to_hash)
+ opts = opts.to_hash
+ elsif opts.respond_to?(:to_h)
+ opts = opts.to_h
+ else
+ raise TypeError, "can't convert #{opts.class} into Hash"
+ end
+ end
+
+ opts.each do |key, value|
+ case key
+ when :indent
+ self.indent = value
+ when :space
+ self.space = value
+ when :space_before
+ self.space_before = value
+ when :array_nl
+ self.array_nl = value
+ when :object_nl
+ self.object_nl = value
+ when :max_nesting
+ self.max_nesting = value || 0
+ when :depth
+ self.depth = value
+ when :buffer_initial_length
+ self.buffer_initial_length = value
+ when :allow_nan
+ self.allow_nan = value
+ when :ascii_only
+ self.ascii_only = value
+ when :script_safe, :escape_slash
+ self.script_safe = value
+ when :strict
+ self.strict = value
+ end
+ end
+
+ self
+ end
+
+ alias_method :merge, :configure
+
+ # call-seq: to_h
+ #
+ # Returns the configuration instance variables as a hash that can be
+ # passed to the configure method.
+ def to_h
+ result = {
+ indent: indent,
+ space: space,
+ space_before: space_before,
+ object_nl: object_nl,
+ array_nl: array_nl,
+ allow_nan: allow_nan?,
+ ascii_only: ascii_only?,
+ max_nesting: max_nesting,
+ script_safe: script_safe?,
+ strict: strict?,
+ depth: depth,
+ buffer_initial_length: buffer_initial_length,
+ }
+
+ instance_variables.each do |iv|
+ iv = iv.to_s[1..-1]
+ result[iv.to_sym] = self[iv]
+ end
+
+ result
+ end
+
+ alias_method :to_hash, :to_h
+
+ # call-seq: [](name)
+ #
+ # Returns the value returned by method +name+.
+ def [](name)
+ if respond_to?(name)
+ __send__(name)
+ else
+ instance_variable_get("@#{name}") if
+ instance_variables.include?("@#{name}".to_sym) # avoid warning
+ end
+ end
+
+ # call-seq: []=(name, value)
+ #
+ # Sets the attribute name to value.
+ def []=(name, value)
+ if respond_to?(name_writer = "#{name}=")
+ __send__ name_writer, value
+ else
+ instance_variable_set "@#{name}", value
+ end
+ end
+ end
+ end
+ end
+end
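
A short usage sketch of the State class defined above, assuming the compiled generator extension is loaded (it provides the underlying readers and writers such as #indent and #indent=):

    state = JSON::Ext::Generator::State.new(indent: "  ", object_nl: "\n")
    state.configure(space: " ")   # returns self
    state.to_h[:indent]           # => "  "
    state[:space]                 # reader access through #[]
    state[:space] = " "           # writer access through #[]=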
diff --git a/lib/prism.rb b/lib/prism.rb
index 50b14a54861919..94f4c8ca5f8a55 100644
--- a/lib/prism.rb
+++ b/lib/prism.rb
@@ -24,6 +24,7 @@ module Prism
autoload :Pack, "prism/pack"
autoload :Pattern, "prism/pattern"
autoload :Reflection, "prism/reflection"
+ autoload :Relocation, "prism/relocation"
autoload :Serialize, "prism/serialize"
autoload :StringQuery, "prism/string_query"
autoload :Translation, "prism/translation"
diff --git a/lib/prism/prism.gemspec b/lib/prism/prism.gemspec
index 6123b71fc8ea8a..0f8881f24cbde2 100644
--- a/lib/prism/prism.gemspec
+++ b/lib/prism/prism.gemspec
@@ -35,6 +35,7 @@ Gem::Specification.new do |spec|
"docs/parser_translation.md",
"docs/parsing_rules.md",
"docs/releasing.md",
+ "docs/relocation.md",
"docs/ripper_translation.md",
"docs/ruby_api.md",
"docs/ruby_parser_translation.md",
@@ -88,6 +89,7 @@ Gem::Specification.new do |spec|
"lib/prism/polyfill/byteindex.rb",
"lib/prism/polyfill/unpack1.rb",
"lib/prism/reflection.rb",
+ "lib/prism/relocation.rb",
"lib/prism/serialize.rb",
"lib/prism/string_query.rb",
"lib/prism/translation.rb",
@@ -130,6 +132,7 @@ Gem::Specification.new do |spec|
"sig/prism/parse_result.rbs",
"sig/prism/pattern.rbs",
"sig/prism/reflection.rbs",
+ "sig/prism/relocation.rbs",
"sig/prism/serialize.rbs",
"sig/prism/string_query.rbs",
"sig/prism/visitor.rbs",
diff --git a/lib/prism/relocation.rb b/lib/prism/relocation.rb
new file mode 100644
index 00000000000000..ad914396f63272
--- /dev/null
+++ b/lib/prism/relocation.rb
@@ -0,0 +1,504 @@
+# frozen_string_literal: true
+
+module Prism
+ # Prism parses deterministically for the same input. This provides a nice
+ # property that is exposed through the #node_id API on nodes. Effectively this
+ # means that for the same input, these values will remain consistent every
+ # time the source is parsed. This means we can reparse the same source with a
+ # #node_id value and find the exact same node again.
+ #
+ # The Relocation module provides an API around this property. It allows you to
+ # "save" nodes and locations using a minimal amount of memory (just the
+ # node_id and a field identifier) and then reify them later.
+ module Relocation
+ # An entry in a repository that will lazily reify its values when they are
+ # first accessed.
+ class Entry
+ # Raised if a value that could potentially be on an entry is missing
+ # because it was either not configured on the repository or it has not yet
+ # been fetched.
+ class MissingValueError < StandardError
+ end
+
+ # Initialize a new entry with the given repository.
+ def initialize(repository)
+ @repository = repository
+ @values = nil
+ end
+
+ # Fetch the filepath of the value.
+ def filepath
+ fetch_value(:filepath)
+ end
+
+ # Fetch the start line of the value.
+ def start_line
+ fetch_value(:start_line)
+ end
+
+ # Fetch the end line of the value.
+ def end_line
+ fetch_value(:end_line)
+ end
+
+ # Fetch the start byte offset of the value.
+ def start_offset
+ fetch_value(:start_offset)
+ end
+
+ # Fetch the end byte offset of the value.
+ def end_offset
+ fetch_value(:end_offset)
+ end
+
+ # Fetch the start character offset of the value.
+ def start_character_offset
+ fetch_value(:start_character_offset)
+ end
+
+ # Fetch the end character offset of the value.
+ def end_character_offset
+ fetch_value(:end_character_offset)
+ end
+
+ # Fetch the start code units offset of the value, for the encoding that
+ # was configured on the repository.
+ def start_code_units_offset
+ fetch_value(:start_code_units_offset)
+ end
+
+ # Fetch the end code units offset of the value, for the encoding that was
+ # configured on the repository.
+ def end_code_units_offset
+ fetch_value(:end_code_units_offset)
+ end
+
+ # Fetch the start byte column of the value.
+ def start_column
+ fetch_value(:start_column)
+ end
+
+ # Fetch the end byte column of the value.
+ def end_column
+ fetch_value(:end_column)
+ end
+
+ # Fetch the start character column of the value.
+ def start_character_column
+ fetch_value(:start_character_column)
+ end
+
+ # Fetch the end character column of the value.
+ def end_character_column
+ fetch_value(:end_character_column)
+ end
+
+ # Fetch the start code units column of the value, for the encoding that
+ # was configured on the repository.
+ def start_code_units_column
+ fetch_value(:start_code_units_column)
+ end
+
+ # Fetch the end code units column of the value, for the encoding that was
+ # configured on the repository.
+ def end_code_units_column
+ fetch_value(:end_code_units_column)
+ end
+
+ # Fetch the leading comments of the value.
+ def leading_comments
+ fetch_value(:leading_comments)
+ end
+
+ # Fetch the trailing comments of the value.
+ def trailing_comments
+ fetch_value(:trailing_comments)
+ end
+
+ # Fetch the leading and trailing comments of the value.
+ def comments
+ leading_comments.concat(trailing_comments)
+ end
+
+ # Reify the values on this entry with the given values. This is an
+ # internal-only API that is called from the repository when it is time to
+ # reify the values.
+ def reify!(values) # :nodoc:
+ @repository = nil
+ @values = values
+ end
+
+ private
+
+ # Fetch a value from the entry, raising an error if it is missing.
+ def fetch_value(name)
+ values.fetch(name) do
+ raise MissingValueError, "No value for #{name}, make sure the " \
+ "repository has been properly configured"
+ end
+ end
+
+ # Return the values from the repository, reifying them if necessary.
+ def values
+ @values || (@repository.reify!; @values)
+ end
+ end
+
+ # Represents the source of a repository that will be reparsed.
+ class Source
+ # The value that will need to be reparsed.
+ attr_reader :value
+
+ # Initialize the source with the given value.
+ def initialize(value)
+ @value = value
+ end
+
+ # Reparse the value and return the parse result.
+ def result
+ raise NotImplementedError, "Subclasses must implement #result"
+ end
+
+ # Create a code units cache for the given encoding.
+ def code_units_cache(encoding)
+ result.code_units_cache(encoding)
+ end
+ end
+
+ # A source that is represented by a file path.
+ class SourceFilepath < Source
+ # Reparse the file and return the parse result.
+ def result
+ Prism.parse_file(value)
+ end
+ end
+
+ # A source that is represented by a string.
+ class SourceString < Source
+ # Reparse the string and return the parse result.
+ def result
+ Prism.parse(value)
+ end
+ end
+
+ # A field that represents the file path.
+ class FilepathField
+ # The file path that this field represents.
+ attr_reader :value
+
+ # Initialize a new field with the given file path.
+ def initialize(value)
+ @value = value
+ end
+
+ # Fetch the file path.
+ def fields(_value)
+ { filepath: value }
+ end
+ end
+
+ # A field representing the start and end lines.
+ class LinesField
+ # Fetches the start and end line of a value.
+ def fields(value)
+ { start_line: value.start_line, end_line: value.end_line }
+ end
+ end
+
+ # A field representing the start and end byte offsets.
+ class OffsetsField
+ # Fetches the start and end byte offset of a value.
+ def fields(value)
+ { start_offset: value.start_offset, end_offset: value.end_offset }
+ end
+ end
+
+ # A field representing the start and end character offsets.
+ class CharacterOffsetsField
+ # Fetches the start and end character offset of a value.
+ def fields(value)
+ {
+ start_character_offset: value.start_character_offset,
+ end_character_offset: value.end_character_offset
+ }
+ end
+ end
+
+ # A field representing the start and end code unit offsets.
+ class CodeUnitOffsetsField
+ # A pointer to the repository object that is used for lazily creating a
+ # code units cache.
+ attr_reader :repository
+
+ # The associated encoding for the code units.
+ attr_reader :encoding
+
+ # Initialize a new field with the associated repository and encoding.
+ def initialize(repository, encoding)
+ @repository = repository
+ @encoding = encoding
+ @cache = nil
+ end
+
+ # Fetches the start and end code units offset of a value for a particular
+ # encoding.
+ def fields(value)
+ {
+ start_code_units_offset: value.cached_start_code_units_offset(cache),
+ end_code_units_offset: value.cached_end_code_units_offset(cache)
+ }
+ end
+
+ private
+
+ # Lazily create a code units cache for the associated encoding.
+ def cache
+ @cache ||= repository.code_units_cache(encoding)
+ end
+ end
+
+ # A field representing the start and end byte columns.
+ class ColumnsField
+ # Fetches the start and end byte column of a value.
+ def fields(value)
+ { start_column: value.start_column, end_column: value.end_column }
+ end
+ end
+
+ # A field representing the start and end character columns.
+ class CharacterColumnsField
+ # Fetches the start and end character column of a value.
+ def fields(value)
+ {
+ start_character_column: value.start_character_column,
+ end_character_column: value.end_character_column
+ }
+ end
+ end
+
+ # A field representing the start and end code unit columns for a specific
+ # encoding.
+ class CodeUnitColumnsField
+ # The repository object that is used for lazily creating a code units
+ # cache.
+ attr_reader :repository
+
+ # The associated encoding for the code units.
+ attr_reader :encoding
+
+ # Initialize a new field with the associated repository and encoding.
+ def initialize(repository, encoding)
+ @repository = repository
+ @encoding = encoding
+ @cache = nil
+ end
+
+ # Fetches the start and end code units column of a value for a particular
+ # encoding.
+ def fields(value)
+ {
+ start_code_units_column: value.cached_start_code_units_column(cache),
+ end_code_units_column: value.cached_end_code_units_column(cache)
+ }
+ end
+
+ private
+
+ # Lazily create a code units cache for the associated encoding.
+ def cache
+ @cache ||= repository.code_units_cache(encoding)
+ end
+ end
+
+ # An abstract field used as the parent class of the two comments fields.
+ class CommentsField
+ # An object that represents a slice of a comment.
+ class Comment
+ # The slice of the comment.
+ attr_reader :slice
+
+ # Initialize a new comment with the given slice.
+ def initialize(slice)
+ @slice = slice
+ end
+ end
+
+ private
+
+ # Create comment objects from the given values.
+ def comments(values)
+ values.map { |value| Comment.new(value.slice) }
+ end
+ end
+
+ # A field representing the leading comments.
+ class LeadingCommentsField < CommentsField
+ # Fetches the leading comments of a value.
+ def fields(value)
+ { leading_comments: comments(value.leading_comments) }
+ end
+ end
+
+ # A field representing the trailing comments.
+ class TrailingCommentsField < CommentsField
+ # Fetches the trailing comments of a value.
+ def fields(value)
+ { trailing_comments: comments(value.trailing_comments) }
+ end
+ end
+
+ # A repository is a configured collection of fields and a set of entries
+ # that knows how to reparse a source and reify the values.
+ class Repository
+ # Raised when multiple fields of the same type are configured on the same
+ # repository.
+ class ConfigurationError < StandardError
+ end
+
+ # The source associated with this repository. This will be either a
+ # SourceFilepath (the most common use case) or a SourceString.
+ attr_reader :source
+
+ # The fields that have been configured on this repository.
+ attr_reader :fields
+
+ # The entries that have been saved on this repository.
+ attr_reader :entries
+
+ # Initialize a new repository with the given source.
+ def initialize(source)
+ @source = source
+ @fields = {}
+ @entries = Hash.new { |hash, node_id| hash[node_id] = {} }
+ end
+
+ # Create a code units cache for the given encoding from the source.
+ def code_units_cache(encoding)
+ source.code_units_cache(encoding)
+ end
+
+ # Configure the filepath field for this repository and return self.
+ def filepath
+ raise ConfigurationError, "Can only specify filepath for a filepath source" unless source.is_a?(SourceFilepath)
+ field(:filepath, FilepathField.new(source.value))
+ end
+
+ # Configure the lines field for this repository and return self.
+ def lines
+ field(:lines, LinesField.new)
+ end
+
+ # Configure the offsets field for this repository and return self.
+ def offsets
+ field(:offsets, OffsetsField.new)
+ end
+
+ # Configure the character offsets field for this repository and return
+ # self.
+ def character_offsets
+ field(:character_offsets, CharacterOffsetsField.new)
+ end
+
+ # Configure the code unit offsets field for this repository for a specific
+ # encoding and return self.
+ def code_unit_offsets(encoding)
+ field(:code_unit_offsets, CodeUnitOffsetsField.new(self, encoding))
+ end
+
+ # Configure the columns field for this repository and return self.
+ def columns
+ field(:columns, ColumnsField.new)
+ end
+
+ # Configure the character columns field for this repository and return
+ # self.
+ def character_columns
+ field(:character_columns, CharacterColumnsField.new)
+ end
+
+ # Configure the code unit columns field for this repository for a specific
+ # encoding and return self.
+ def code_unit_columns(encoding)
+ field(:code_unit_columns, CodeUnitColumnsField.new(self, encoding))
+ end
+
+ # Configure the leading comments field for this repository and return
+ # self.
+ def leading_comments
+ field(:leading_comments, LeadingCommentsField.new)
+ end
+
+ # Configure the trailing comments field for this repository and return
+ # self.
+ def trailing_comments
+ field(:trailing_comments, TrailingCommentsField.new)
+ end
+
+ # Configure both the leading and trailing comment fields for this
+ # repository and return self.
+ def comments
+ leading_comments.trailing_comments
+ end
+
+ # This method is called from nodes and locations when they want to enter
+ # themselves into the repository. It is internal-only and meant to be
+ # called from the #save* APIs.
+ def enter(node_id, field_name) # :nodoc:
+ entry = Entry.new(self)
+ @entries[node_id][field_name] = entry
+ entry
+ end
+
+ # This method is called from the entries in the repository when they need
+ # to reify their values. It is internal-only and meant to be called from
+ # the various value APIs.
+ def reify! # :nodoc:
+ result = source.result
+
+ # Attach the comments if they have been requested as part of the
+ # configuration of this repository.
+ if fields.key?(:leading_comments) || fields.key?(:trailing_comments)
+ result.attach_comments!
+ end
+
+ queue = [result.value] #: Array[Prism::node]
+ while (node = queue.shift)
+ @entries[node.node_id].each do |field_name, entry|
+ value = node.public_send(field_name)
+ values = {}
+
+ fields.each_value do |field|
+ values.merge!(field.fields(value))
+ end
+
+ entry.reify!(values)
+ end
+
+ queue.concat(node.compact_child_nodes)
+ end
+
+ @entries.clear
+ end
+
+ private
+
+ # Append the given field to the repository and return the repository so
+ # that these calls can be chained.
+ def field(name, value)
+ raise ConfigurationError, "Cannot specify multiple #{name} fields" if @fields.key?(name)
+ @fields[name] = value
+ self
+ end
+ end
+
+ # Create a new repository for the given filepath.
+ def self.filepath(value)
+ Repository.new(SourceFilepath.new(value))
+ end
+
+ # Create a new repository for the given string.
+ def self.string(value)
+ Repository.new(SourceString.new(value))
+ end
+ end
+end
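
The repository above is designed to be configured once and then queried lazily: fields are chained onto it, entries are handed out immediately, and the values are only computed when Repository#reify! reparses the source and walks the tree. A minimal usage sketch follows; the enclosing module name (Prism::Relocation), the node-side #save_location call, and the Entry accessors for lines and offsets are assumptions not shown in this hunk (the accessors mirror the configured field keys).

    require "prism"

    # Configure a repository: reparse from a file path and record lines,
    # byte offsets, and comments for every saved value.
    repository = Prism::Relocation.filepath("example.rb").lines.offsets.comments

    # Saving a node's location hands back an Entry whose values are still pending.
    node = Prism.parse_file("example.rb").value.statements.body.first
    entry = node.save_location(repository) # assumed node-side #save* API

    # The first value read triggers Repository#reify!: the source is reparsed,
    # the tree is walked once, and every pending entry is filled in.
    entry.start_line   # => Integer
    entry.start_offset # => Integer
    entry.comments     # => leading and trailing Comment objects
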
diff --git a/lib/rdoc/code_object.rb b/lib/rdoc/code_object.rb
index aeb4b4762e88da..83997c2580e723 100644
--- a/lib/rdoc/code_object.rb
+++ b/lib/rdoc/code_object.rb
@@ -96,6 +96,11 @@ class RDoc::CodeObject
attr_accessor :viewer
+ ##
+ # When this code object is mixed into a class from a module, this points to the Context in which it was originally defined.
+
+ attr_accessor :mixin_from
+
##
# Creates a new CodeObject that will document itself and its children
@@ -111,6 +116,7 @@ def initialize
@full_name = nil
@store = nil
@track_visibility = true
+ @mixin_from = nil
initialize_visibility
end
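
Downstream generators can use the new attribute to distinguish embedded members from ones defined directly on the class. A hypothetical helper (the method name is illustrative only):

    # Returns a label for a method, constant, or attribute that was embedded
    # from a mixed-in module; mixin_from is nil for members defined on the
    # class itself.
    def mixin_label(code_object)
      return nil unless code_object.mixin_from
      "Mixed in from #{code_object.mixin_from.full_name}"
    end
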
diff --git a/lib/rdoc/code_object/class_module.rb b/lib/rdoc/code_object/class_module.rb
index c69e14b5e4ba0b..a99acb895634fb 100644
--- a/lib/rdoc/code_object/class_module.rb
+++ b/lib/rdoc/code_object/class_module.rb
@@ -223,6 +223,7 @@ def comment= comment # :nodoc:
def complete min_visibility
update_aliases
remove_nodoc_children
+ embed_mixins
update_includes
remove_invisible min_visibility
end
@@ -798,4 +799,43 @@ def update_extends
extends.uniq!
end
+ def embed_mixins
+ return unless options.embed_mixins
+
+ includes.each do |include|
+ next if String === include.module
+ include.module.method_list.each do |code_object|
+ add_method(prepare_to_embed(code_object))
+ end
+ include.module.constants.each do |code_object|
+ add_constant(prepare_to_embed(code_object))
+ end
+ include.module.attributes.each do |code_object|
+ add_attribute(prepare_to_embed(code_object))
+ end
+ end
+
+ extends.each do |ext|
+ next if String === ext.module
+ ext.module.method_list.each do |code_object|
+ add_method(prepare_to_embed(code_object, true))
+ end
+ ext.module.attributes.each do |code_object|
+ add_attribute(prepare_to_embed(code_object, true))
+ end
+ end
+ end
+
+ private
+
+ def prepare_to_embed(code_object, singleton=false)
+ code_object = code_object.dup
+ code_object.mixin_from = code_object.parent
+ code_object.singleton = true if singleton
+ set_current_section(code_object.section.title, code_object.section.comment)
+ # add_method and add_attribute will reassign self's visibility back to the method/attribute
+ # so we need to sync self's visibility with the object's to properly retain that information
+ self.visibility = code_object.visibility
+ code_object
+ end
end
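
For context, a hedged sketch of what embed_mixins does when the embed-mixins option is enabled (presumably exposed as the --embed-mixins CLI flag; the class and module names below are made up): methods, constants, and attributes from included modules are copied into the including class's documentation, methods from extended modules are copied as singleton methods, and each copy keeps a mixin_from pointer to the module it came from.

    # With the option enabled, the generated docs for Greeter list #greet and
    # GREETING directly on the class, and .announce as a singleton method,
    # instead of only linking to Helpers and Announcing.
    module Helpers
      GREETING = "hello"

      def greet
        GREETING
      end
    end

    module Announcing
      def announce
        "announcing"
      end
    end

    class Greeter
      include Helpers   # instance methods, constants, attributes are embedded
      extend Announcing # methods are embedded as singleton (class) methods
    end
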
diff --git a/lib/rdoc/generator/template/darkfish/class.rhtml b/lib/rdoc/generator/template/darkfish/class.rhtml
index afac0c44cd4b4c..0bec9fc9ce3d87 100644
--- a/lib/rdoc/generator/template/darkfish/class.rhtml
+++ b/lib/rdoc/generator/template/darkfish/class.rhtml
@@ -18,7 +18,7 @@
-
+
<%= klass.type %> <%= klass.full_name %>
@@ -27,7 +27,7 @@
<%- klass.each_section do |section, constants, attributes| -%>
-
+
<%- if section.title then -%>
@@ -54,7 +54,13 @@
<%- constants.each do |const| -%>
+ <%- end -%>
<%- if attrib.comment then -%>
<%= attrib.description.strip %>
<%- else -%>
@@ -94,13 +105,13 @@
next if visibilities.empty?
visibilities.each do |visibility, methods|
next if methods.empty? %>
-
+