diff --git a/app/assets/stylesheets/_variables.scss b/app/assets/stylesheets/_variables.scss index 51afa04b5..d412b3ec7 100644 --- a/app/assets/stylesheets/_variables.scss +++ b/app/assets/stylesheets/_variables.scss @@ -18,6 +18,7 @@ $danger: #EB5959; $success: #2ECC71; $info: #58A09A; $brand: #4B68FF; +$brand-comp: #F05137; $data: ( diff --git a/app/assets/stylesheets/complaints.scss b/app/assets/stylesheets/complaints.scss index fdec7e561..8c679c338 100644 --- a/app/assets/stylesheets/complaints.scss +++ b/app/assets/stylesheets/complaints.scss @@ -60,3 +60,44 @@ .is-lead + h1.complaint-title { margin-top: -1rem; } + +.is-brand-comp { + color: $brand-comp; +} + +.with-subtitle { + margin-bottom: 0; +} + +.subtitle { + margin-top: 0; + font-weight: normal; + font-size: 1.2rem; +} + +.modules { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(20rem, 1fr)); + gap: 0.5rem; +} + +.module-widget { + border: 1px solid $muted-graphic; + border-radius: 0.2rem; + padding: 0.5rem; + color: $key !important; + + > h4 { + text-decoration: underline; + } + + > p { + text-decoration: none; + } + + &:hover { + background: $primary; + color: white !important; + text-decoration: none !important; + } +} diff --git a/app/controllers/complaints_controller.rb b/app/controllers/complaints_controller.rb index 3d8af4507..953cd9353 100644 --- a/app/controllers/complaints_controller.rb +++ b/app/controllers/complaints_controller.rb @@ -3,6 +3,7 @@ class ComplaintsController < ApplicationController before_action :access_check, only: [:show, :comment] before_action :write_access_check, only: [:self_assign, :update_status, :change_content_type] before_action :verify_staff, only: [:reports, :reporting] + before_action :training_access, only: [:training] def index render layout: 'without_sidebar' @@ -202,6 +203,16 @@ def reporting render layout: 'without_sidebar' end + def training + pages = Dir.glob(Rails.root.join('app', 'views', 'complaints', 'training', '*.html.erb')) + 
.map { |page| File.basename(page, '.html.erb') } + if pages.include?(params[:page]) + render "complaints/training/#{params[:page]}", layout: 'osa_training' + else + not_found! + end + end + private def access_check @@ -235,4 +246,10 @@ def set_complaint @complaint end + + def training_access + unless user_signed_in? && (current_user.staff? || current_user.at_least_moderator?) + not_found! + end + end end diff --git a/app/views/complaints/report.html.erb b/app/views/complaints/report.html.erb index 730ddedba..f608f10eb 100644 --- a/app/views/complaints/report.html.erb +++ b/app/views/complaints/report.html.erb @@ -6,7 +6,8 @@

Thank you for taking the time to make a report. If you've seen harmful, abusive, or illegal content on our communities, you can report this to us here. You can also use this page if you've received a message saying we've - classified your content as harmful, abusive, or illegal and you wish to contest it. + classified your content as harmful, abusive, or illegal and you wish to contest it, or if you have a complaint + about our processes or our compliance with our duties.

@@ -45,7 +46,8 @@ <%= label_tag :reported_url, 'Where is this content?' %>
Enter a URL or link to where we can find this content on our network. You can use the Copy Link button under - posts to get a direct link to a post. + posts to get a direct link to a post. Enter N/A if you are not complaining about specific content; add more + details below.
<%= text_field_tag :reported_url, nil, class: 'form-element', required: true %>
@@ -95,6 +97,11 @@ Tell us any additional information you have about this report. Is there any additional content we removed that you would like to include? Provide detailed reasoning explaining why you disagree with our classification. + <%= text_area_tag :content, nil, class: 'form-element', required: true, rows: 10 %> diff --git a/app/views/complaints/training/definitions.html.erb b/app/views/complaints/training/definitions.html.erb new file mode 100644 index 000000000..af53f7170 --- /dev/null +++ b/app/views/complaints/training/definitions.html.erb @@ -0,0 +1,274 @@ +

Last updated 12 March 2026

+ +

Online Safety Act

+

Definitions

+ +

+ It is important to understand what each type of content is exactly, so they are defined here. These are definitions + which we have written: the Act defines each type of content in terms of related criminal offences, which is more + complex than is necessary to deal with the content, so these descriptions are intended to provide a simple overview. +

+ +

Priority Illegal Content

+
+
+
+

Terrorism

+ Terrorism is considered a violent action or threat of action, designed to influence a government or intimidate the + public and advance a cause. Online terrorism content is any content made available to others online, which can + encourage or promote terrorism. +
+
+
+
+

Child Sexual Exploitation & Abuse (CSEA)

+
(a) Grooming
+ Grooming is the process of building a relationship or emotional connection with a child or young person so the + perpetrator can manipulate, exploit, and abuse them. +
(b) Image-based Child Sexual Abuse Material (CSAM)
+ Online CSAM includes material depicting sexual activity, or indecent or prohibited imagery of children and can + take the form of photographic images and videos, as well as non-photographic material, such as drawings and + animations. Children themselves may generate content that can be considered CSAM, which can cause them harm. UK + law enforcement refers to this as self-generated indecent imagery (SGII). +
(c) CSAM URLs
+ This is distinct from child sexual abuse material itself in that the content which is shared links to + CSAM rather than containing CSAM itself. +
+
+
+
+

Hate

+ Hate offences can be experienced by many people, in particular minorities and other protected groups. The offences + can be targeted at one or more individuals or wider communities. Online hate content may include threatening or + abusive words, behaviours, images, and other media. The Police and the Crown Prosecution Service (CPS) recognise + hate crime based on race, religion, disability, sexual orientation, and transgender identity. +
+
+
+
+

Harassment, stalking, threats, and abuse

+ The offences of harassment, stalking, threats, and abuse relate to unwanted behaviours that can cause alarm and + distress to other individuals or put them in fear of violence. A case can involve several types of behaviour, + which may include violent threats, cyberstalking, or conduct intended to put a person in fear of violence or cause + serious alarm or distress. +
+
+
+
+

Controlling or coercive behaviour

+ Controlling or coercive behaviour occurs where the victim-survivor and the perpetrator are personally connected, + the perpetrator repeatedly or continuously engages in behaviour that is controlling or coercive, and this + behaviour has a serious effect on the victim-survivor, putting them in fear of violence or causing serious alarm + or distress which has a substantial adverse effect on their usual day-to-day activities.
+
+ Coercive behaviour can be an act (or a pattern of acts) of assault, threats, humiliation and intimidation or other + abuse that is used to harm, punish, or frighten those in intimate or family contexts. Controlling behaviour + includes a range of acts designed to make a person subordinate and/or dependent by isolating them from sources of + support, exploiting their resources and capacities for personal gain, depriving them of the means needed for + independence, resistance, and escape, and regulating their everyday behaviour. +
+
+
+
+

Intimate image abuse

+ Most commonly, an ‘intimate image’ is a photograph or video where the person or people are depicted engaging or + participating or are present during a sexual act and/or where their genitals, buttocks, or breasts are exposed or + covered only with underwear. Intimate image abuse occurs when these intimate images are shared or distributed + without the consent of the person pictured; or when someone threatens to share or distribute these images or + videos without consent. +
+
+
+
+

Extreme pornography

+ ‘Extreme pornography’ is an umbrella term used in UK law to cover several categories of images which are illegal + to possess. Although the legislation varies slightly across legal systems in the UK, extreme pornography broadly + covers images which are produced principally for sexual arousal, and which depict extreme or obscene behaviours. + Possession involves having ‘custody or control’ over the content. Extreme pornographic content includes realistic + and explicit depictions of necrophilia, bestiality, acts threatening a person's life, acts that could result in + serious injury to specific parts of the body, rape, and assault by penetration. +
+
+
+
+

Sexual exploitation of adults

+ Sexual exploitation is the inducement of a commercial sex act generally by means of force, fraud, or coercion. + Online this frequently falls into two distinct categories: (a) individuals who are coerced or forced into sex + work, and (b) consenting adult sex workers who are controlled or exploited by another person. +
+
+
+
+

Human trafficking

+ Human trafficking encompasses a wide range of harmful activities. It can involve modern slavery, and victims and + survivors include adults and children. It is estimated that there were 122,000 people living in modern slavery in + the UK in 2021. Notable forms of human trafficking where harm can manifest online include sexual exploitation and + abuse, forced labour, and criminal exploitation such as county lines exportation of illegal drugs. +
+
+
+
+

Unlawful immigration

+ ‘Illegal entry’ means an individual entering the United Kingdom in breach of a deportation order, entering without + permission to remain, or without entry clearance when the individual needs it. A person commits the offence of + ‘unlawful immigration’ if they do an act which facilitates a breach or attempted breach of immigration law by an + individual who is not a national of the United Kingdom – and where they know or have reasonable cause for + believing this to be the case. It is usually encouraged by organised crime. Online aspects of unlawful immigration + could include the sale of counterfeit travel documents such as passports, visas and identification papers, as well + as the sale of crossings. +
+
+
+
+

Fraud and financial offences

+ Content in this category overlaps strongly with offences related to Proceeds of Crime. Fraud may occur online as + fraud by impersonation or false representation; purchase, investment, romance, or employment scams, or the + recruitment of money mules or launderers. +
+
+
+
+

Proceeds of crime

+ ‘Proceeds of crime’ is the term used for money or assets gained by criminals during their criminal activity and + money laundering. Examples of activities which involve the proceeds of crime online include people being recruited + as money mules to transfer illegally obtained money between bank accounts, discussion between criminals to arrange + money laundering, and stolen personal information (via other criminal activity) offered for sale which can be used + to commit or facilitate other types of fraud. +
+
+
+
+

Drugs and psychoactive substances

+ The supply of drugs and psychoactive substances is facilitated online by enabling suppliers to market their + products and connect with potential buyers. Many of the functionalities of online services that enable suppliers + of legal products to reach, engage and deal directly or indirectly with customers can also be used by those + selling illicit substances. +
+
+
+
+

Firearms, knives, and other weapons

+ The firearms, knives, and other weapons offences cover various matters relating to the online sale of a range of + firearms, knives, and other weapons. In the UK, firearms and certain offensive weapons are classified as restricted + or prohibited. Prohibited firearms and offensive weapons are subject to the strictest limitations on sale. +
+
+
+
+

Encouraging or assisting suicide

+ This offence takes place when an individual intentionally encourages or assists a person to (attempt to) end their + life. Content related to suicide is extremely sensitive; while there may be users who post this content to cause + harm to others, some users may post this content to find supportive communities, to express their own experiences + as part of a healing process or to attempt to help others. Users posting and engaging with this type of content + can include those in vulnerable circumstances who are themselves dealing with thoughts of suicide or self-harm, + as well as those who have recovered or are recovering from mental health challenges. +
+
+
+
+

Foreign interference

+ The new Foreign Interference Offence (FIO) has been designed to tackle malign activity carried out for, or on + behalf of, or intended to benefit, a foreign power. Prohibited conduct captured by this offence will include where + there is a misrepresentation of a person’s identity or purpose, or in the presentation of the information, for + example, through state-backed disinformation campaigns. +
+
+
+
+

Animal cruelty

+ Most acts of cruelty occur offline. The publication online of content relating to or depicting these offline acts + does not in itself cause the animal unnecessary suffering (or further suffering) and therefore cannot constitute + an offence under the Animal Welfare Act. However, the existence of online activities that encourage, assist, or + commit acts of animal cruelty may result in content being made available which may distress a user, or cause them + to engage in harmful or illegal behaviours and activities themselves. +
+
+
+ +

Non-Priority Illegal Content

+
+
+
+

Epilepsy trolling

+ Some individuals with epilepsy may have a physical reaction to online content; they may feel disorientated, + uncomfortable, or unwell after seeing certain images or patterns. The offence is sharing an image with the + intention to cause harm to an individual with epilepsy. +
+
+
+
+

Cyberflashing

+ The cyberflashing offence refers to the sending of a photograph or film of genitals, to cause alarm, distress, or + humiliation, or to obtain sexual gratification. In addition, cyberflashing can form part of a pattern of harmful + behaviour that includes other harms such as cyberstalking, harassment, and/or controlling or coercive behaviour. +
+
+
+
+

Encouraging or assisting serious self-harm

+ This offence takes place when an individual intentionally encourages or assists a person to carry out serious + self-harm. Posting of content that amounts to the offence of encouraging or assisting serious self-harm is often + found in proximity to posting of other types of content related to self-harm that do not amount to the offence. +
+
+
+
+

False communications

+ A person commits the false communications offence if they send a message, with no reasonable excuse to send it, + that they know to be false and intend for that message to cause harm. +
+
+
+
+

Obscene content showing torture of humans and animals

+ This is a non-priority offence covering depictions of cruelty which are so serious as to be obscene, but which may + not amount to another priority offence (such as terrorism or animal cruelty offences). This offence is important + because for pre-recorded content some illegal animal cruelty and human torture content may not necessarily amount + to a priority offence. +
+
+
+
+

Threatening communications

+ A person commits the threatening communications offence if they send a message that conveys a threat of death or + serious harm and, at the time of sending it, the sender intended the individual encountering the message to fear + that the threat would be carried out, or was reckless as to whether the individual encountering the message would + fear that the threat would be carried out. +
+
+
+ +

Service-specific illegal content risks

+

+ Our risk assessment has also identified some additional types of illegal content based on the platform's risk profile. +

+ +
+
+
+

De-anonymisation (“doxxing”)

+ De-anonymisation, known online as “doxxing”, is the posting of a user’s personal information by another user + without permission. This may include a user’s name, home or work address, email address, telephone number, + information about protected characteristics such as sexual orientation, or other personally-identifiable + information. This is often done with the intent to humiliate the victim, but in more serious cases may be done + with the intention to expose the victim to physical harm. +
+
+
+
+

Copyright infringement

+ Copyright infringement may occur online when content is sourced or reused from elsewhere as part of + user-generated content, in violation of the copyright on that content and not otherwise permitted by fair dealing + exceptions. It is also likely that the use of generative AI models is a strong risk factor for copyright + infringements, as these models often fail to appropriately reference their sources for users to consider whether a + copyright exists.
+
+
+ +
+ <%= link_to osa_training_path('handling') do %> + Next
+ Handling Illegal Content » + <% end %> +
diff --git a/app/views/complaints/training/home.html.erb b/app/views/complaints/training/home.html.erb new file mode 100644 index 000000000..0ecadd1af --- /dev/null +++ b/app/views/complaints/training/home.html.erb @@ -0,0 +1,52 @@ +

Online Safety Act

+

Moderator Training

+ +

+ As part of our responsibilities under the Online Safety Act, we're obligated to provide training to all staff and + volunteers undertaking moderation duties. +

+

+ Take your training here. Completing all the modules will record that you have completed the training, but you can + revisit these pages at any time if you need guidance when making moderation decisions. +

+ +
+ <%= link_to osa_training_path('overview'), class: 'module-widget' do %> +

Overview

+

+ An overview of the Online Safety Act, our duties, and your responsibilities as a volunteer moderator. +

+ <% end %> + <%= link_to osa_training_path('illegal-content'), class: 'module-widget' do %> +

Priority & Non-Priority Illegal Content

+

+ An explanation of the difference between the 17 types of priority illegal content, and other applicable types of + non-priority illegal content. +

+ <% end %> + <%= link_to osa_training_path('definitions'), class: 'module-widget' do %> +

Definitions

+

+ Definitions of all the types of illegal content which apply to us. +

+ <% end %> + <%= link_to osa_training_path('handling'), class: 'module-widget' do %> +

Handling Illegal Content

+

+ Your responsibilities and the steps you need to take in response to identifying potentially illegal content. +

+ <% end %> + <%= link_to osa_training_path('higher-risk'), class: 'module-widget' do %> +

Higher-Risk Content

+

+ Some types of content are more likely to occur in our communities than others. More detail on those here. +

+ <% end %> + <%= link_to osa_training_path('conclusion'), class: 'module-widget' do %> +

Conclusion

+

+ Thank you for taking the time to complete this training. Mark it as complete here and come back here if you need + to refer back to it. +

+ <% end %> +
diff --git a/app/views/complaints/training/illegal-content.html.erb b/app/views/complaints/training/illegal-content.html.erb new file mode 100644 index 000000000..d55411938 --- /dev/null +++ b/app/views/complaints/training/illegal-content.html.erb @@ -0,0 +1,19 @@ +

Last updated 12 March 2026

+ +

Online Safety Act

+

Priority & Non-Priority Illegal Content

+ +

+ The Act sets out 17 types of priority illegal content, and a number of types of non-priority illegal content. We have + carried out a risk assessment for all types of priority illegal content, and applicable types of non-priority illegal + content, which details the likelihood and impact of each type of content on our platform specifically. +

+ + + +
+ <%= link_to osa_training_path('definitions') do %> + Next
+ Definitions » + <% end %> +
diff --git a/app/views/complaints/training/overview.html.erb b/app/views/complaints/training/overview.html.erb new file mode 100644 index 000000000..358c61976 --- /dev/null +++ b/app/views/complaints/training/overview.html.erb @@ -0,0 +1,64 @@ +

Last updated 12 March 2026

+ +

Online Safety Act

+

Overview

+ +

+ The Online Safety Act 2023 (available here) is a law + established in the UK in 2023 with the aim of improving online safety, particularly with regard to children, but with + wide-ranging effects for all online services. All services with UK users are required to comply with the Act. There + are ongoing cases which will define whether this is enforceable on non-UK entities in practice, but because Codidact + is a UK-based entity, we are clearly within scope and required to comply. +

+

+ The Act is enforced by the UK's communications regulator, Ofcom, which also sets out the Register of Risks and Codes + of Practice on which our approach is based. +

+ +

Types of service

+

+ The Act defines two types of service: search services and user-to-user services. User-to-user services are those where + users may interact with one another; this is where we fall. There are different requirements imposed on each kind of + service, which for user-to-user services primarily focus on preventing and removing harmful content, and protecting + users from related harms. +

+ +

Our responsibilities

+

+ Responsibility for compliance with the requirements of the Act obviously falls on us (meaning the Codidact Foundation + as the organisation running the platform). The Foundation designates one of the Board of Directors as a named + individual with ultimate responsibility for compliance with the Act, which is currently + ArtOfCode. +

+

+ One of our responsibilities is to ensure that our volunteer moderators (that's you) have an awareness of the Act and + are provided with appropriate training in order to equip them to handle any harmful content which may appear on the + platform. +

+ +

Your responsibilities

+

+ As a volunteer moderator, your job is to guide, curate, and set the tone for your community. Part of that job is + protecting the community from any unwanted content. The majority of the time, that might take the form of off-topic + posts, arguments between users, or handling flags for your attention. Unfortunately, it may also take the form of + harmful or illegal content covered by the Act, and one of your responsibilities is to ensure this is dealt with and + escalated appropriately. +

+

+ To be clear: we're not expecting you to handle harmful or illegal content alone. Our ask of you is + simple: if you identify something that you think would be covered by the Act, please: +

+ + +
+ <%= link_to osa_training_path('illegal-content') do %> + Next
+ Priority & Non-Priority Illegal Content » + <% end %> +
diff --git a/app/views/layouts/osa_training.html.erb b/app/views/layouts/osa_training.html.erb new file mode 100644 index 000000000..54cf9cb66 --- /dev/null +++ b/app/views/layouts/osa_training.html.erb @@ -0,0 +1,60 @@ + + + + <%= render 'layouts/head' %> + + +<%= render 'layouts/header' %> + +
+
+
+
+ <%= render 'shared/notices' %> + + <% if @first_visit_notice %> + <%= render 'notices/first_visit' %> + <% end %> + + <%= yield %> +
+
+ + +
+
+ +<%= render 'layouts/footer' %> + +<%= render 'layouts/matomo' %> + +<% if Rails.env.production? %> + +<% end %> + + diff --git a/config/config/safety_center.yml b/config/config/safety_center.yml index dffcad187..10ad2b2e1 100644 --- a/config/config/safety_center.yml +++ b/config/config/safety_center.yml @@ -45,16 +45,24 @@ outcomes: copyright: *illegal_upheld appeal: content: our community team agreed with your appeal and have reversed the action taken in your case. + process: + content: our community team have reviewed your complaint and found information to substantiate it. + actionable: name: Actionable - description: The content is actionable but the reported classification is not correct. NOT applicable to appeals. + description: The content is actionable but the reported classification is not correct. NOT applicable to appeals or + process complaints. user_facing: illegal: &illegal_actionable content: our community team agreed that the content you reported was actionable and have taken appropriate action, but have changed your classification of the content for reporting purposes. abusive: *illegal_actionable copyright: *illegal_actionable - appeal: ~ + appeal: + content: ~ + process: + content: ~ + disputed: name: Disputed description: The reporter's classification is disputed; the content does not appear to be actionable; in the case of @@ -69,6 +77,8 @@ outcomes: appeal: content: our community team have reviewed your appeal and have decided that the action taken in your case was appropriate. + process: + content: our community team have reviewed your complaint but found no information to substantiate it. 
report_types: illegal: @@ -87,6 +97,10 @@ report_types: enabled: true name: Classification Appeal description: an appeal regarding how we've handled your content + process: + enabled: true + name: Complaint about our process + description: a complaint about our processes or compliance with our duties # This list is sourced from Ofcom's list of the 17 types of priority illegal content, which is in turn sourced from # Schedules 5-7 of the Online Safety Act. diff --git a/config/routes.rb b/config/routes.rb index 05961caad..4719d354a 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -388,6 +388,7 @@ post 'report/:token/content_type', to: 'complaints#change_content_type', as: :update_complaint_content_type get 'reports', to: 'complaints#reports', as: :complaints get 'reporting', to: 'complaints#reporting', as: :complaints_reporting + get 'training/:page', to: 'complaints#training', as: :osa_training end get '403', to: 'errors#forbidden' diff --git a/db/schema.rb b/db/schema.rb index 8752d5420..2278fb412 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -10,7 +10,7 @@ # # It's strongly recommended that you check this file into your version control system. -ActiveRecord::Schema[7.2].define(version: 2025_12_26_185531) do +ActiveRecord::Schema[7.2].define(version: 2026_02_08_223211) do create_table "abilities", charset: "utf8mb4", collation: "utf8mb4_0900_ai_ci", force: :cascade do |t| t.bigint "community_id" t.string "name"