Diffstat (limited to 'refs/reftable-backend.c')
| -rw-r--r-- | refs/reftable-backend.c | 155 |
1 file changed, 143 insertions, 12 deletions
diff --git a/refs/reftable-backend.c b/refs/reftable-backend.c
index efe2b0258c..3c96fbf66f 100644
--- a/refs/reftable-backend.c
+++ b/refs/reftable-backend.c
@@ -19,8 +19,10 @@
 #include "../reftable/reftable-record.h"
 #include "../reftable/reftable-error.h"
 #include "../reftable/reftable-iterator.h"
+#include "../repo-settings.h"
 #include "../setup.h"
 #include "../strmap.h"
+#include "../trace2.h"
 #include "parse.h"
 #include "refs-internal.h"
 
@@ -51,6 +53,7 @@ struct reftable_ref_store {
 	struct reftable_write_options write_options;
 
 	unsigned int store_flags;
+	enum log_refs_config log_all_ref_updates;
 	int err;
 };
 
@@ -156,22 +159,23 @@ static struct reftable_stack *stack_for(struct reftable_ref_store *store,
 	}
 }
 
-static int should_write_log(struct ref_store *refs, const char *refname)
+static int should_write_log(struct reftable_ref_store *refs, const char *refname)
 {
-	if (log_all_ref_updates == LOG_REFS_UNSET)
-		log_all_ref_updates = is_bare_repository() ? LOG_REFS_NONE : LOG_REFS_NORMAL;
+	enum log_refs_config log_refs_cfg = refs->log_all_ref_updates;
+	if (log_refs_cfg == LOG_REFS_UNSET)
+		log_refs_cfg = is_bare_repository() ? LOG_REFS_NONE : LOG_REFS_NORMAL;
 
-	switch (log_all_ref_updates) {
+	switch (log_refs_cfg) {
 	case LOG_REFS_NONE:
-		return refs_reflog_exists(refs, refname);
+		return refs_reflog_exists(&refs->base, refname);
 	case LOG_REFS_ALWAYS:
 		return 1;
 	case LOG_REFS_NORMAL:
-		if (should_autocreate_reflog(refname))
+		if (should_autocreate_reflog(log_refs_cfg, refname))
 			return 1;
-		return refs_reflog_exists(refs, refname);
+		return refs_reflog_exists(&refs->base, refname);
 	default:
-		BUG("unhandled core.logAllRefUpdates value %d", log_all_ref_updates);
+		BUG("unhandled core.logAllRefUpdates value %d", log_refs_cfg);
 	}
 }
 
@@ -283,6 +287,7 @@ static struct ref_store *reftable_be_init(struct repository *repo,
 	base_ref_store_init(&refs->base, repo, gitdir, &refs_be_reftable);
 	strmap_init(&refs->worktree_stacks);
 	refs->store_flags = store_flags;
+	refs->log_all_ref_updates = repo_settings_get_log_all_ref_updates(repo);
 
 	refs->write_options.hash_id = repo->hash_algo->format_id;
 	refs->write_options.default_permissions = calc_shared_perm(0666 & ~mask);
@@ -455,10 +460,81 @@ struct reftable_ref_iterator {
 
 	const char *prefix;
 	size_t prefix_len;
+	char **exclude_patterns;
+	size_t exclude_patterns_index;
+	size_t exclude_patterns_strlen;
 	unsigned int flags;
 	int err;
 };
 
+/*
+ * Handle exclude patterns. Returns either `1`, which tells the caller that the
+ * current reference shall not be shown. Or `0`, which indicates that it should
+ * be shown.
+ */
+static int should_exclude_current_ref(struct reftable_ref_iterator *iter)
+{
+	while (iter->exclude_patterns[iter->exclude_patterns_index]) {
+		const char *pattern = iter->exclude_patterns[iter->exclude_patterns_index];
+		char *ref_after_pattern;
+		int cmp;
+
+		/*
+		 * Lazily cache the pattern length so that we don't have to
+		 * recompute it every time this function is called.
+		 */
+		if (!iter->exclude_patterns_strlen)
+			iter->exclude_patterns_strlen = strlen(pattern);
+
+		/*
+		 * When the reference name is lexicographically bigger than the
+		 * current exclude pattern we know that it won't ever match any
+		 * of the following references, either. We thus advance to the
+		 * next pattern and re-check whether it matches.
+		 *
+		 * Otherwise, if it's smaller, then we do not have a match and
+		 * thus want to show the current reference.
+		 */
+		cmp = strncmp(iter->ref.refname, pattern,
+			      iter->exclude_patterns_strlen);
+		if (cmp > 0) {
+			iter->exclude_patterns_index++;
+			iter->exclude_patterns_strlen = 0;
+			continue;
+		}
+		if (cmp < 0)
+			return 0;
+
+		/*
+		 * The reference shares a prefix with the exclude pattern and
+		 * shall thus be omitted. We skip all references that match the
+		 * pattern by seeking to the first reference after the block of
+		 * matches.
+		 *
+		 * This is done by appending the highest possible character to
+		 * the pattern. Consequently, all references that have the
+		 * pattern as prefix and whose suffix starts with anything in
+		 * the range [0x00, 0xfe] are skipped. And given that 0xff is a
+		 * non-printable character that shouldn't ever be in a ref name,
+		 * we'd not yield any such record, either.
+		 *
+		 * Note that the seeked-to reference may also be excluded. This
+		 * is not handled here though, but the caller is expected to
+		 * loop and re-verify the next reference for us.
+		 */
+		ref_after_pattern = xstrfmt("%s%c", pattern, 0xff);
+		iter->err = reftable_iterator_seek_ref(&iter->iter, ref_after_pattern);
+		iter->exclude_patterns_index++;
+		iter->exclude_patterns_strlen = 0;
+		trace2_counter_add(TRACE2_COUNTER_ID_REFTABLE_RESEEKS, 1);
+
+		free(ref_after_pattern);
+		return 1;
+	}
+
+	return 0;
+}
+
 static int reftable_ref_iterator_advance(struct ref_iterator *ref_iterator)
 {
 	struct reftable_ref_iterator *iter =
@@ -489,6 +565,9 @@ static int reftable_ref_iterator_advance(struct ref_iterator *ref_iterator)
 			break;
 		}
 
+		if (iter->exclude_patterns && should_exclude_current_ref(iter))
+			continue;
+
 		if (iter->flags & DO_FOR_EACH_PER_WORKTREE_ONLY &&
 		    parse_worktree_ref(iter->ref.refname, NULL, NULL, NULL) !=
 			    REF_WORKTREE_CURRENT)
@@ -578,6 +657,11 @@ static int reftable_ref_iterator_abort(struct ref_iterator *ref_iterator)
 		(struct reftable_ref_iterator *)ref_iterator;
 	reftable_ref_record_release(&iter->ref);
 	reftable_iterator_destroy(&iter->iter);
+	if (iter->exclude_patterns) {
+		for (size_t i = 0; iter->exclude_patterns[i]; i++)
+			free(iter->exclude_patterns[i]);
+		free(iter->exclude_patterns);
+	}
 	free(iter);
 	return ITER_DONE;
 }
@@ -588,9 +672,53 @@ static struct ref_iterator_vtable reftable_ref_iterator_vtable = {
 	.abort = reftable_ref_iterator_abort
 };
 
+static int qsort_strcmp(const void *va, const void *vb)
+{
+	const char *a = *(const char **)va;
+	const char *b = *(const char **)vb;
+	return strcmp(a, b);
+}
+
+static char **filter_exclude_patterns(const char **exclude_patterns)
+{
+	size_t filtered_size = 0, filtered_alloc = 0;
+	char **filtered = NULL;
+
+	if (!exclude_patterns)
+		return NULL;
+
+	for (size_t i = 0; ; i++) {
+		const char *exclude_pattern = exclude_patterns[i];
+		int has_glob = 0;
+
+		if (!exclude_pattern)
+			break;
+
+		for (const char *p = exclude_pattern; *p; p++) {
+			has_glob = is_glob_special(*p);
+			if (has_glob)
+				break;
+		}
+		if (has_glob)
+			continue;
+
+		ALLOC_GROW(filtered, filtered_size + 1, filtered_alloc);
+		filtered[filtered_size++] = xstrdup(exclude_pattern);
+	}
+
+	if (filtered_size) {
+		QSORT(filtered, filtered_size, qsort_strcmp);
+		ALLOC_GROW(filtered, filtered_size + 1, filtered_alloc);
+		filtered[filtered_size++] = NULL;
+	}
+
+	return filtered;
+}
+
 static struct reftable_ref_iterator *ref_iterator_for_stack(struct reftable_ref_store *refs,
 							    struct reftable_stack *stack,
 							    const char *prefix,
+							    const char **exclude_patterns,
 							    int flags)
 {
 	struct reftable_ref_iterator *iter;
@@ -603,6 +731,7 @@ static struct reftable_ref_iterator *ref_iterator_for_stack(struct reftable_ref_
 	iter->base.oid = &iter->oid;
 	iter->flags = flags;
 	iter->refs = refs;
+	iter->exclude_patterns = filter_exclude_patterns(exclude_patterns);
 
 	ret = refs->err;
 	if (ret)
@@ -624,7 +753,7 @@ done:
 
 static struct ref_iterator *reftable_be_iterator_begin(struct ref_store *ref_store,
 						       const char *prefix,
-						       const char **exclude_patterns UNUSED,
+						       const char **exclude_patterns,
 						       unsigned int flags)
 {
 	struct reftable_ref_iterator *main_iter, *worktree_iter;
@@ -635,7 +764,8 @@ static struct ref_iterator *reftable_be_iterator_begin(struct ref_store *ref_sto
 		required_flags |= REF_STORE_ODB;
 	refs = reftable_be_downcast(ref_store, required_flags, "ref_iterator_begin");
 
-	main_iter = ref_iterator_for_stack(refs, refs->main_stack, prefix, flags);
+	main_iter = ref_iterator_for_stack(refs, refs->main_stack, prefix,
+					   exclude_patterns, flags);
 
 	/*
 	 * The worktree stack is only set when we're in an actual worktree
@@ -649,7 +779,8 @@ static struct ref_iterator *reftable_be_iterator_begin(struct ref_store *ref_sto
 	 * Otherwise we merge both the common and the per-worktree refs into a
 	 * single iterator.
 	 */
-	worktree_iter = ref_iterator_for_stack(refs, refs->worktree_stack, prefix, flags);
+	worktree_iter = ref_iterator_for_stack(refs, refs->worktree_stack, prefix,
+					       exclude_patterns, flags);
 	return merge_ref_iterator_begin(&worktree_iter->base, &main_iter->base,
 					ref_iterator_select, NULL);
 }
@@ -1227,7 +1358,7 @@ static int write_transaction_table(struct reftable_writer *writer, void *cb_data
 		} else if (!(u->flags & REF_SKIP_CREATE_REFLOG) &&
 			   (u->flags & REF_HAVE_NEW) &&
 			   (u->flags & REF_FORCE_CREATE_REFLOG ||
-			    should_write_log(&arg->refs->base, u->refname))) {
+			    should_write_log(arg->refs, u->refname))) {
 			struct reftable_log_record *log;
 			int create_reflog = 1;
 
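
Editor's note: the core of the patch is the skip logic in should_exclude_current_ref(). Because both the reftable iterator and the (glob-free) exclude patterns are sorted, a matching prefix lets the backend re-seek past the whole excluded block instead of filtering references one at a time. The standalone sketch below (plain libc only; the ref list, the seek_ref() helper, and the single hard-coded pattern are illustrative assumptions, not Git code) shows the same prefix-compare-and-reseek idea:

#include <stdio.h>
#include <string.h>

/* A sorted list of refnames, standing in for the reftable iterator. */
static const char *refs[] = {
	"refs/heads/main",
	"refs/heads/topic",
	"refs/tags/v1.0",
	"refs/tags/v1.1",
	"refs/tags/v2.0",
	NULL,
};

/* Stand-in for reftable_iterator_seek_ref(): first entry >= target. */
static size_t seek_ref(const char *target)
{
	size_t i;
	for (i = 0; refs[i]; i++)
		if (strcmp(refs[i], target) >= 0)
			break;
	return i;
}

int main(void)
{
	const char *pattern = "refs/tags/";	/* one glob-free exclude pattern */
	size_t pattern_len = strlen(pattern);
	size_t i = 0;

	while (refs[i]) {
		if (!strncmp(refs[i], pattern, pattern_len)) {
			/*
			 * Same trick as the patch: append the highest possible
			 * byte so that every ref sharing the excluded prefix
			 * sorts before the seek target, then skip the whole
			 * block with a single re-seek.
			 */
			char after[64];
			snprintf(after, sizeof(after), "%s%c", pattern, 0xff);
			i = seek_ref(after);
			continue;
		}
		printf("%s\n", refs[i++]);	/* prints the two refs/heads/ entries */
	}
	return 0;
}

The real implementation additionally walks a whole list of patterns in lockstep with the iterator and counts each re-seek through the trace2 counter visible in the diff.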
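Exclude patterns only help at this layer when they can be treated as literal prefixes, which is why filter_exclude_patterns() drops anything containing glob characters and sorts the remainder. A minimal standalone approximation follows; is_glob_char() is a stand-in for Git's is_glob_special(), and the input patterns are made up:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Stand-in for Git's is_glob_special(): characters fnmatch() treats specially. */
static int is_glob_char(char c)
{
	return c == '*' || c == '?' || c == '[' || c == '\\';
}

static int pattern_cmp(const void *va, const void *vb)
{
	return strcmp(*(const char *const *)va, *(const char *const *)vb);
}

int main(void)
{
	const char *input[] = { "refs/tags/", "refs/heads/*-wip", "refs/notes/" };
	const char *kept[8];
	size_t nr = 0;

	for (size_t i = 0; i < sizeof(input) / sizeof(*input); i++) {
		const char *p = input[i];
		while (*p && !is_glob_char(*p))
			p++;
		if (*p)
			continue;	/* contains a glob: cannot be prefix-matched, drop it */
		kept[nr++] = input[i];
	}

	/* Sort so that patterns and refs can be walked in lockstep. */
	qsort(kept, nr, sizeof(*kept), pattern_cmp);

	for (size_t i = 0; i < nr; i++)
		printf("%s\n", kept[i]);	/* prints refs/notes/ then refs/tags/ */
	return 0;
}

Dropping glob patterns should be safe because exclusion here is best-effort pruning: references that are not skipped by the backend are still expected to be filtered by the generic ref-iteration machinery.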
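The should_write_log() change, by contrast, is about where the core.logAllRefUpdates value comes from: instead of consulting the log_all_ref_updates global, the backend now caches the value returned by repo_settings_get_log_all_ref_updates() in the ref store. The decision itself is unchanged and boils down to a small table; the sketch below restates it with simplified stand-in types (want_reflog(), autocreate(), and the enum are not Git's own):

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

enum log_cfg { LOG_NONE, LOG_NORMAL, LOG_ALWAYS };

/* Stand-in for should_autocreate_reflog() in "normal" mode. */
static bool autocreate(const char *refname)
{
	return !strncmp(refname, "refs/heads/", 11) ||
	       !strncmp(refname, "refs/remotes/", 13) ||
	       !strncmp(refname, "refs/notes/", 11) ||
	       !strcmp(refname, "HEAD");
}

static bool want_reflog(enum log_cfg cfg, const char *refname, bool reflog_exists)
{
	switch (cfg) {
	case LOG_NONE:		/* core.logAllRefUpdates=false */
		return reflog_exists;
	case LOG_ALWAYS:	/* core.logAllRefUpdates=always */
		return true;
	case LOG_NORMAL:	/* core.logAllRefUpdates=true; the fallback in non-bare repos */
		return autocreate(refname) || reflog_exists;
	}
	return false;
}

int main(void)
{
	/* A branch gets a reflog in "normal" mode even without an existing log. */
	printf("%d\n", want_reflog(LOG_NORMAL, "refs/heads/main", false));
	/* An arbitrary ref only keeps logging if its reflog already exists. */
	printf("%d\n", want_reflog(LOG_NORMAL, "refs/foo/bar", false));
	return 0;
}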
