# Loader filter, DRY / XTC sampler controls, and basic generation toggles.
# Each widget is registered in the shared.gradio registry under its parameter name.
shared.gradio['filter_by_loader'] = gr.Dropdown(
    label="Filter by loader",
    choices=["All"] + list(loaders.loaders_and_params.keys()),
    value="All",
    elem_classes='slim-dropdown',
)

# DRY (Don't Repeat Yourself) repetition-penalty parameters.
shared.gradio['dry_multiplier'] = gr.Slider(
    minimum=0,
    maximum=5,
    step=0.01,
    value=generate_params['dry_multiplier'],
    label='dry_multiplier',
    info='Set to greater than 0 to enable DRY. Recommended value: 0.8.',
)
shared.gradio['dry_allowed_length'] = gr.Slider(
    minimum=1,
    maximum=20,
    step=1,
    value=generate_params['dry_allowed_length'],
    label='dry_allowed_length',
    info='Longest sequence that can be repeated without being penalized.',
)
shared.gradio['dry_base'] = gr.Slider(
    minimum=1,
    maximum=4,
    step=0.01,
    value=generate_params['dry_base'],
    label='dry_base',
    info='Controls how fast the penalty grows with increasing sequence length.',
)
shared.gradio['dry_sequence_breakers'] = gr.Textbox(
    value=generate_params['dry_sequence_breakers'],
    label='dry_sequence_breakers',
    info='Tokens across which sequence matching is not continued. Specified as a comma-separated list of quoted strings.',
)

# XTC (Exclude Top Choices) sampler parameters.
shared.gradio['xtc_threshold'] = gr.Slider(
    minimum=0,
    maximum=0.5,
    step=0.01,
    value=generate_params['xtc_threshold'],
    label='xtc_threshold',
    info='If 2 or more tokens have probability above this threshold, consider removing all but the last one.',
)
shared.gradio['xtc_probability'] = gr.Slider(
    minimum=0,
    maximum=1,
    step=0.01,
    value=generate_params['xtc_probability'],
    label='xtc_probability',
    info='Probability that the removal will actually happen. 0 disables the sampler. 1 makes it always happen.',
)

# Token-level generation toggles, initialized from persisted settings.
shared.gradio['auto_max_new_tokens'] = gr.Checkbox(
    value=shared.settings['auto_max_new_tokens'],
    label='auto_max_new_tokens',
    info='Expand max_new_tokens to the available context length.',
)
shared.gradio['ban_eos_token'] = gr.Checkbox(
    value=shared.settings['ban_eos_token'],
    label='Ban the eos_token',
    info='Forces the model to never end the generation prematurely.',
)
shared.gradio['add_bos_token'] = gr.Checkbox(
    value=shared.settings['add_bos_token'],
    label='Add the bos_token to the beginning of prompts',
    info='Disabling this can make the replies more creative.',
)
# Free-form text inputs for stopping strings and token bans.
# Fix: the original read `...["custom_stopping_strings"]orNone` — the missing
# whitespace makes `orNone` a single (undefined) identifier adjacent to the
# subscript, which is a SyntaxError. The intent is `... or None`, so an empty
# setting falls back to None (an empty textbox) rather than an empty string.
shared.gradio['custom_stopping_strings'] = gr.Textbox(
    lines=2,
    value=shared.settings["custom_stopping_strings"] or None,
    label='Custom stopping strings',
    info='Written between "" and separated by commas.',
    placeholder='"\\n", "\\nYou:"',
)
shared.gradio['custom_token_bans'] = gr.Textbox(
    value=shared.settings['custom_token_bans'] or None,
    label='Token bans',
    info='Token IDs to ban, separated by commas. The IDs can be found in the Default or Notebook tab.',
)
# Advanced decoding controls (contrastive search, CFG, grammar, sampler order)
# followed by output/streaming options. All widgets go into shared.gradio.
shared.gradio['penalty_alpha'] = gr.Slider(
    minimum=0,
    maximum=5,
    value=generate_params['penalty_alpha'],
    label='penalty_alpha',
    info='For Contrastive Search. do_sample must be unchecked.',
)
shared.gradio['guidance_scale'] = gr.Slider(
    minimum=-0.5,
    maximum=2.5,
    step=0.05,
    value=generate_params['guidance_scale'],
    label='guidance_scale',
    info='For CFG. 1.5 is a good value.',
)
shared.gradio['grammar_file'] = gr.Dropdown(
    value='None',
    choices=utils.get_available_grammars(),
    label='Load grammar from file (.gbnf)',
    elem_classes='slim-dropdown',
)
shared.gradio['smoothing_curve'] = gr.Slider(
    minimum=1.0,
    maximum=10.0,
    step=0.01,
    value=generate_params['smoothing_curve'],
    label='smoothing_curve',
    info='Adjusts the dropoff curve of Quadratic Sampling.',
)
shared.gradio['temperature_last'] = gr.Checkbox(
    value=generate_params['temperature_last'],
    label='temperature_last',
    info='Moves temperature/dynamic temperature/quadratic sampling to the end of the sampler stack, ignoring their positions in "Sampler priority".',
)
shared.gradio['sampler_priority'] = gr.Textbox(
    value=generate_params['sampler_priority'],
    lines=12,
    label='Sampler priority',
    info='Parameter names separated by new lines or commas.',
)

# Prompt truncation and streaming/output behavior.
shared.gradio['truncation_length'] = gr.Number(
    precision=0,
    step=256,
    value=get_truncation_length(),
    label='Truncate the prompt up to this length',
    info='The leftmost tokens are removed if the prompt exceeds this length. Most models require this to be at most 2048.',
)
shared.gradio['max_tokens_second'] = gr.Slider(
    minimum=0,
    maximum=20,
    step=1,
    value=shared.settings['max_tokens_second'],
    label='Maximum tokens/second',
    info='To make text readable in real time.',
)
shared.gradio['max_updates_second'] = gr.Slider(
    minimum=0,
    maximum=24,
    step=1,
    value=shared.settings['max_updates_second'],
    label='Maximum UI updates/second',
    info='Set this if you experience lag in the UI during streaming.',
)
shared.gradio['skip_special_tokens'] = gr.Checkbox(
    value=shared.settings['skip_special_tokens'],
    label='Skip special tokens',
    info='Some specific models need this unset.',
)
shared.gradio['stream'] = gr.Checkbox(
    value=shared.settings['stream'],
    label='Activate text streaming',
)